diff --git a/COMMITTERS b/COMMITTERS new file mode 100644 index 00000000..cad7a5a1 --- /dev/null +++ b/COMMITTERS @@ -0,0 +1,31 @@ +The following people have commit access to the Sakai Project fork +of the Sparse Map Content System sources originally authored +by Ian Boston (ian@tfd.co.uk). Note that this is not a +full list of the authors; for that, you will need to look +over the log messages to see all the patch contributors. + +Committers: + + carl@hallwaytech.com Carl Hall + zach@aeroplanesoftware.com Zach Thomas + chris@media.berkeley.edu Chris Tweney + arwhyte@umich.edu Anthony Whyte + +Contributors: + +For a complete list of contributions please see the commit log. +Contributors include: + + ian@tfd.co.uk Ian Boston (original author) + ray@media.berkeley.edu Ray Davis + cdunstall@csu.edu.au Chris Dunstall + erik.froese@gmail.com Erik Froese + duffy@rsmart.com Duffy Gillman + johnk@media.berkeley.edu John King + kotwal.aadish@gmail.com Aadish Kotwal + droma@csu.edu.au Dave Roma + mark@dishevelled.net Mark Triggs + mawalsh@csu.edu.au Mark Walsh + roberttdev@gmail.com Rob Williams + + diff --git a/CONTRIBUTING b/CONTRIBUTING new file mode 100644 index 00000000..dcc9a57d --- /dev/null +++ b/CONTRIBUTING @@ -0,0 +1,9 @@ +Please read the README and NOTICE files before contributing. +Contributions are very welcome under those terms, but its your responsibility +to ensure that the contributions meet those requirements. + +All patches prior to this file appearing in the code base were contributed under no +explicit policy on accepting patches and Timefields Ltd holds no copyright to those +contributons and makes no assertions as to the IPR status or patent status of those +contributions. + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..75b52484 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/NOTICE b/NOTICE new file mode 100644 index 00000000..9ccf1b96 --- /dev/null +++ b/NOTICE @@ -0,0 +1,18 @@ +Sparse Content Bundle +Copyright 2011 Timefields Ltd + +The Copyright of patches appearing in the code base prior to the appearance of this NOTICE and CONTRIBUTING file +were accepted under the assumption that re-licensing of those patches, under the Apache 2 Software License to the +Sakai Foundation was acceptable to the contributors, and that there was nothing in the patches that would prevent +that from happening, infringe on any patents or IPR. If you are concerned about this, you can find authors using +the version control system. + +----------------------------------------------------------- + +This product includes software from the The Apache Software Foundation (http://www.apache.org/). 
+ +Patches and contributions made to this code base are made under the terms of the Apache 2 Software License (see para 5). + +Binary distributions of this product contain jars developed and licensed by other third parties, identified by the +LICENSE and NOTICE files included within each jar under the META-INF directory. + diff --git a/README.textile b/README.textile index 087225d1..31ea7e74 100644 --- a/README.textile +++ b/README.textile @@ -1,35 +1,18 @@ h1. Map Content System. -h2. Rational +h2. Rationale - In the Q1 release of Nakamura we had major scalability and concurrency problems caused mainly by our use cases for a content -store not being closely aligned with those of Jackrabbit. We were not able to work around those problems and although we did manage -to release the code, its quite clear that in certain areas Jackrabbit wont work for us. This should not reflect badly on Jackrabbit, -but it is a realization that our use cases are not compatible with Jackrabbit when exposed to scale. +In the Q1 release of Nakamura we had major scalability and concurrency problems caused mainly by our use cases for a content store not being closely aligned with those of Jackrabbit. We were not able to work around those problems and although we did manage to release the code, its quite clear that in certain areas Jackrabbit won't work for us. This should not reflect badly on Jackrabbit, but it is a realization that our use cases are not compatible with Jackrabbit when exposed to scale. - This code base is a reaction to that. It aims to be really simple, completely concurrent with no synchronization and designed to scale -linearly with the number of cores and number of servers in a cluster. To do this it borrows some of the concepts from JCR at a very -abstract level, but is making a positive effort and selfish effort to only provide those things that we absolutely need to have. +This code base is a reaction to that. 
It aims to be really simple, completely concurrent with no synchronization and designed to scale linearly with the number of cores and number of servers in a cluster. To do this it borrows some of the concepts from JCR at a very abstract level, but is making a positive effort and selfish effort to only provide those things that we absolutely need to have. - This code provides User, Group, Access Control and Content functionality using a sparse Map as a storage abstraction. +This code provides User, Group, Access Control and Content functionality using a sparse Map as a storage abstraction. The implementation works on manipulating sparse objects in the Map with operations like get, insert and delete, but has no understanding of the underlying implementation of the storage mechanism. - The Implementation works on manipulating sparse objects in the Map with operations like get, insert and delete, but -has no understanding of the underlying implementation of the storage mechanism. +At the moment we have 3 storage mechanisms implemented, In Memory using a HashMap, Cassandra and JDBC capable of doing sharded storage, The approach should work on any Column Store (Dynamo, BigTable, Riak, Voldomort, Hbase etc). The JDBC Driver has configuration files for Derby, MySQL, Oracle, PostgreSQL. - At the moment we have 2 storage mechanisms implemented, In Memory using a HashMap, and Cassandra. The approach should -work on any Column Store (Dynamo, BigTable, Riak, Voldomort, Hbase etc) and can also work on RDBMS's including sharded storage. - - At the moment there is no query support, expecting all access to be via column IDs, and multiple views to be written to the -underlying store. - - The intention is to provide write through caches based on EhCache or Infinispan. - - Transactions are supported, if supported by the underlying implementation of the storage, otherwise all operations are BASIC, non Atomic and immediate in nature. 
-We will add search indexes at some point using Lucene, perhaps in the form of Zoie - - - At this stage its pre-alpha, untested for performance and scalability and incomplete. +Query support is provided by finder messages that use index table written on update. Caching support is via an interface allowing external providers. In Nakamura there is an EhCache implementation and it would be a relatively simple task to write an Infinispan version. Transactions are supported, if supported by the underlying implementation of the storage, otherwise all operations are BASIC, non Atomic and immediate in nature. +Search is provided in the form of a companion project that uses SolrJ 4. h2. Backlog @@ -48,17 +31,13 @@ h2. Completed Backlog # Implement SparseMapUserManager and related classes in th server bundle in Sling. (done 28/11/2010) - - - h2. Tests h3. Memory All performed on a MackBook Pro which is believed to have 4 cores. -Add a user, 1 - 10 threads. Storage is a Concurrent Hash Map. Assuming the Concurrent Hash Map is 100% concurrent, this test -tests the code base for concurrent efficiency. +Add a user, 1 - 10 threads. Storage is a Concurrent Hash Map. Assuming the Concurrent Hash Map is 100% concurrent, this test tests the code base for concurrent efficiency. |Threads|Time(s)|Throughput|Throughput per thread|Speedup|Concurrent Efficiency| | 1| 0.46| 2188| 2188| 1| 100%| @@ -74,7 +53,6 @@ tests the code base for concurrent efficiency. Throughput is users added per second. - h3. JDBC Same as above, using a local MySQL Instance. @@ -93,9 +71,7 @@ Same as above, using a local MySQL Instance. h3. Cassandra -Using an untuned OOTB Cassandra instance running on the same box as the test, fighting for processor Cores. - - +Using an untuned OOTB Cassandra instance running on the same box as the test, fighting for processor Cores. 
|Threads|Time(s)|Throughput|Throughput per thread|Speedup|Concurrent Efficiency| | 1| 1.14| 873| 873| 1| 100%| @@ -112,7 +88,11 @@ Using an untuned OOTB Cassandra instance running on the same box as the test, fi Throughput is users added per second. +So far it looks like the code is concurrent, but MySQL is considerably slower than Cassandra or Memory. Below the Fighting for cores the box doesn't have enough CPUs to support the DB if present and the code. + + +h2. Contributions, Patches, and License. -So far it looks like the code is concurrent, but MySQL is considerably slower than Cassandra or Memory. Below the Fighting for cores -the box doesn't have enough CPUs to support the DB if present and the code. +The code in this code base is (c) Timefields Ltd and licensed to the Sakai Foundation under a Apache 2 Software License. Before making a contribution by means of a patch or otherwise please ensure that you read and understand the terms under which a patch or contribution will be accepted, outlined in the NOTICES file. All patches and contributions are made under those terms with no exceptions. +I am sorry if this sounds a bit legal, but I want to be able to always license this software to the Sakai Foundation under an Apache 2 license and so I have to insist that no contributions are made that would prevent that from happening. I can't ask everyone who submits a patch to sign a legal document, so this is the next best thing. If you have a problem with this approach, please email me and we can try and work it out. \ No newline at end of file diff --git a/core/pom.xml b/core/pom.xml new file mode 100644 index 00000000..de2b11c6 --- /dev/null +++ b/core/pom.xml @@ -0,0 +1,278 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../pom.xml + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + bundle + 1.5.1-SNAPSHOT + Sparse Map :: Sparse Map Content Storage bundle. 
+ Server that uses a sparse map to represent content mapping closely to a colum database like Cassandra. + + UTF-8 + true + + + scm:git:git://github.com/sakaiproject/sparsemapcontent.git + scm:git:git@github.com:sakaiproject/sparsemapcontent.git + http://github.com/sakaiproject/sparsemapcontent/ + + + + + org.apache.felix + maven-scr-plugin + + + generate-scr-scrdescriptor + + scr + + + core-serviceComponents.xml + + + + + + org.apache.felix + maven-bundle-plugin + true + + + sparse-map + ${project.artifactId} + + org.sakaiproject.nakamura.api.lite.* + + + * + + + org.apache.commons.io; version="1.4", + com.google.common.collect; version="9.0.0", + * + + org.sakaiproject.nakamura.lite.* + + + + + maven-resources-plugin + 2.5 + + + copy-osgi-resources + prepare-package + + copy-resources + + + ${basedir}/target/classes + + + ${basedir}/target/scr-plugin-generated + true + + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.3.2 + + + + test-jar + + test-jar + + + + + + ${basedir}/target/classes/META-INF/MANIFEST.MF + + OSGI-INF/core-serviceComponents.xml,OSGI-INF/serviceComponents.xml + + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.5 + + + **/Test*.java + **/*Test.java + **/*TestCase.java + + + + + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + org.apache.felix + maven-scr-plugin + [1.0.0,) + scr + + + + + + org.apache.maven.plugins + maven-install-plugin + [2.3.1,) + install-file + + + + + + org.apache.maven.plugins + maven-enforcer-plugin + [1.0.0,) + enforce + + + + + + + + + + + + + javax.servlet + servlet-api + 2.4 + + + commons-pool + commons-pool + 1.5 + + + commons-lang + commons-lang + 2.5 + + + commons-io + commons-io + 1.4 + + + commons-codec + commons-codec + 1.4 + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + + org.apache.felix + org.osgi.core + 1.2.0 + provided + + + org.apache.felix + org.osgi.compendium + 1.2.0 + provided + + + + + org.slf4j + slf4j-api + 1.5.10 + + + org.slf4j + slf4j-simple + 
1.5.10 + test + + + org.mockito + mockito-all + 1.8.5 + + + + + org.apache.felix + org.apache.felix.scr.annotations + + + org.apache.derby + derby + 10.6.2.1 + test + + + junit + junit + 4.4 + test + + + findbugs + annotations + 1.0.0 + provided + + + + + + + + sakai-maven2 + Sakai Maven Repo + default + http://source.sakaiproject.org/maven2 + + true + ignore + + + false + ignore + + + + + diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/BaseColumnFamilyCacheManager.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/BaseColumnFamilyCacheManager.java new file mode 100644 index 00000000..b57a7dd5 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/BaseColumnFamilyCacheManager.java @@ -0,0 +1,43 @@ +package org.sakaiproject.nakamura.api.lite; + +import java.util.Map; + +public abstract class BaseColumnFamilyCacheManager implements ColumnFamilyCacheManager { + + public Map getAccessControlCache() { + throw new UnsupportedOperationException("Use getCache(String columnFamily)"); + } + + public Map getAuthorizableCache() { + throw new UnsupportedOperationException("Use getCache(String columnFamily)"); + } + + public Map getContentCache() { + throw new UnsupportedOperationException("Use getCache(String columnFamily)"); + } + + /** + * This method deals with backward compatibility of StorageCacheManager which was developed when + * @param configuration + * @param columnFamily + * @param storageCacheManager + * @return + */ + public static Map getCache(Configuration configuration, String columnFamily, + StorageCacheManager storageCacheManager) { + if ( storageCacheManager instanceof ColumnFamilyCacheManager ) { + return ((ColumnFamilyCacheManager) storageCacheManager).getCache(columnFamily); + } + if ( configuration.getAclColumnFamily().equals(columnFamily)) { + return storageCacheManager.getAccessControlCache(); + } + if ( configuration.getAuthorizableColumnFamily().equals(columnFamily)) { + return 
storageCacheManager.getAuthorizableCache(); + } + if ( configuration.getContentColumnFamily().equals(columnFamily)) { + return storageCacheManager.getContentCache(); + } + return null; + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/CacheHolder.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/CacheHolder.java new file mode 100644 index 00000000..cfcf0fb7 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/CacheHolder.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.api.lite; + +import java.util.Map; + +public class CacheHolder { + + private Map o; + private long locker; + private long ttl; + + public CacheHolder(Map o) { + this.o = o; + this.ttl = System.currentTimeMillis()+10000L; + this.locker = -1; + } + public CacheHolder(Map o, long locker) { + this.o = o; + this.ttl = System.currentTimeMillis()+10000L; + this.locker = locker; + } + + public Map get() { + return o; + } + + public boolean isLocked(long managerId) { + if ( locker == -1 || managerId == locker ) { + return false; + } + return (System.currentTimeMillis() < ttl); + } + public boolean wasLockedTo(long managerId) { + return (locker == managerId); + } + +} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/ClientPoolException.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/ClientPoolException.java similarity index 100% rename from src/main/java/org/sakaiproject/nakamura/api/lite/ClientPoolException.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/ClientPoolException.java diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/ColumnFamilyCacheManager.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/ColumnFamilyCacheManager.java new file mode 100644 index 00000000..1134cd18 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/ColumnFamilyCacheManager.java @@ -0,0 +1,10 @@ +package org.sakaiproject.nakamura.api.lite; + +import java.util.Map; + +public interface ColumnFamilyCacheManager extends StorageCacheManager { + + public Map getCache(String columnFamily); + + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/CommitHandler.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/CommitHandler.java new file mode 100644 index 00000000..e0c344a6 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/CommitHandler.java @@ -0,0 +1,12 @@ +package org.sakaiproject.nakamura.api.lite; + +/** + * 
Performs a commit. + * @author ieb + * + */ +public interface CommitHandler { + + void commit(); + +} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/Configuration.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/Configuration.java similarity index 80% rename from src/main/java/org/sakaiproject/nakamura/api/lite/Configuration.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/Configuration.java index 1d3c0522..f57193e8 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/Configuration.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/Configuration.java @@ -18,6 +18,8 @@ package org.sakaiproject.nakamura.api.lite; +import java.util.Map; + /** * An Interface to define configuration for the sparse content store. */ @@ -50,4 +52,25 @@ public interface Configuration { */ String getContentColumnFamily(); + /** + * @return name of the lock column family. + */ + String getLockColumnFamily(); + + /** + * @return the config, shared by all drivers. + */ + Map getSharedConfig(); + + /** + * @return an array of properties names that should be indexed. + */ + String[] getIndexColumnNames(); + + /** + * + * @return an array of index column types + */ + String[] getIndexColumnTypes(); + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/DataFormatException.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/DataFormatException.java new file mode 100644 index 00000000..ebce9904 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/DataFormatException.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite; + +/** + * A more specialized error for StorageClients when + * they are asked to store malformed data. + * + * For example, this exception will be thrown if the + * data to store is too large. + */ +public class DataFormatException extends StorageClientException { + + private static final long serialVersionUID = 1464691562897983604L; + + public DataFormatException(String message, Throwable t) { + super(message, t); + } + + public DataFormatException(String message) { + super(message); + } +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/Feedback.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/Feedback.java new file mode 100644 index 00000000..530deddc --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/Feedback.java @@ -0,0 +1,16 @@ +package org.sakaiproject.nakamura.api.lite; + +import java.io.File; + + +public interface Feedback { + + void log(String format, Object ... 
params); + + void exception(Throwable e); + + void newLogFile(File currentFile); + + void progress(boolean dryRun, long done, long toDo); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/MigrateContentService.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/MigrateContentService.java new file mode 100644 index 00000000..f8da7710 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/MigrateContentService.java @@ -0,0 +1,30 @@ +package org.sakaiproject.nakamura.api.lite; + +import java.io.IOException; + +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; + +public interface MigrateContentService { + + /** + * @param dryRun + * dry run the migration + * @param limit + * if dry running, limit the number + * @param reindexAll + * if try reindex all + * @param feedback + * a logger to provide feedback to. If you want to control the + * Migration, implement your own logger and throw a + * RuntimeException from the info or debug methods to stop the + * migrator in the case of an emergency. + * @throws ClientPoolException + * @throws StorageClientException + * @throws AccessDeniedException + * @throws IOException + * @throws PropertyMigrationException thrown if there are unresolved dependencies. + */ + void migrate(boolean dryRun, int limit, boolean reindexAll, Feedback feedback) + throws ClientPoolException, StorageClientException, AccessDeniedException, IOException, PropertyMigrationException; + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/MigrationService.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/MigrationService.java new file mode 100644 index 00000000..c2a74adc --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/MigrationService.java @@ -0,0 +1,31 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite; + +@SuppressWarnings({"UnusedDeclaration"}) +public interface MigrationService { + + /** + * Perform upgrades by running the upgrade() methods of all registered PropertyMigrator instances. + * @param dryRun True if you want to run the upgrade without actually changing data; false if you want data changes saved. + * @param verify True if you want to check upgraded data using the PropertyMigrator.verify() method. + * @throws Exception if an unrecoverable error occurred. 
+ */ + void doMigration(boolean dryRun, boolean verify) throws Exception; + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/PropertyMigrationException.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/PropertyMigrationException.java new file mode 100644 index 00000000..1fcf8a09 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/PropertyMigrationException.java @@ -0,0 +1,14 @@ +package org.sakaiproject.nakamura.api.lite; + +public class PropertyMigrationException extends Exception { + + public PropertyMigrationException(String message) { + super(message); + } + + /** + * + */ + private static final long serialVersionUID = -3856860605825678993L; + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/PropertyMigrator.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/PropertyMigrator.java new file mode 100644 index 00000000..fc3ee0b0 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/PropertyMigrator.java @@ -0,0 +1,80 @@ +package org.sakaiproject.nakamura.api.lite; + +import java.util.Map; + +/** + * Implementations of PropertyMigrators registered with OSGi are called by the + * MigrateContentComponent, when its activated (normally disabled). All + * registered implementation will be called, once for each Map within the + * system. If they determine that the map is of the appropriate type and needs + * modification, they should modify it, and return true. If not they should + * leave the map untouched. There is no guarantee in what order each migrator + * might be called. The lack of ordering avoids the situation where one migrator + * has a dependency on another migrator which would require those in production + * to ensure that they had all dependent migrators register. 
If that becomes a + * requirement then we will need to build a mechanism where migrators can + * express their dependencies and refuse to run if things they depend on are not + * present in the stack..... but perhaps that's what OSGi is for? If any + * PropertyMigrator modifies a set of properties, the map will be re-saved under + * the same key. If no properties are modified by any PropertyMigrators, then + * the object will be re-indexed with the current index operation. Un-filtered + * access is given to all properties, so anyone implementing this interface must + * take great care not to break referential integrity of each object or + * invalidate the internals of the object. + * + * The MigrateContentComponent is not active by default, and should only be made + * active by an Administrator using the Web UI. + * + * The migrate methods will be called once for every object within the system. + * (could be billions of times). + * + * @author ieb + * + */ +public interface PropertyMigrator { + + /** + * Option: If set to "true" in the option set then the PropertyMigrator will + * only run once, else, the PropertyMigrator will run whenever it is present. + */ + public static final String OPTION_RUNONCE = "runonce"; + + /** + * @param rid + * the row id of the current object as loaded from the store. If + * the property representing the key for the type of object is + * changed, this object will be saved under a new rowid. The + * calculation of the rowid depends on the storage implementation + * and the value of the key. + * @param properties + * a map of properties. Implementations are expected to modify + * this map, and return true if modifications are made. + * @return true if any modifications were made to properties, false + * otherwise. + */ + boolean migrate(String rid, Map properties); + + /** + * @return get a list of dependencies that this PropertyMigrator is + * dependent on. 
If the named dependencies have not already been run + * or are missing from the current set, then the migration will + * refuse to run. The value of each element of getDependencies() + * should match the value of getName() of the implementation of this + * interface on which there is a dependency. + */ + String[] getDependencies(); + + /** + * @return get the name of this dependency, which is used in + * getDependencies(). It must be globally unique over all + * implementations of PropertyMigrator. ie getClass().getName() is a + * reasonable choice. + */ + String getName(); + + /** + * @return get a map of options for the migrator. + */ + Map getOptions(); + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/Disposable.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/RemoveProperty.java similarity index 82% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/Disposable.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/RemoveProperty.java index 85c446f2..e1e1b82f 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/Disposable.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/RemoveProperty.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Sakai Foundation (SF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -15,16 +15,8 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.storage; - -/** - * Things that are disposable, must be closed. 
- * - * @author ieb - * - */ -public interface Disposable { +package org.sakaiproject.nakamura.api.lite; - void close(); +public class RemoveProperty { } diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/Repository.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/Repository.java similarity index 79% rename from src/main/java/org/sakaiproject/nakamura/api/lite/Repository.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/Repository.java index eac78a01..8b738018 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/Repository.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/Repository.java @@ -22,9 +22,17 @@ /** * Repository container that provides a mechanism to login to the sparse content * store. + * @since 1.0 */ public interface Repository { + /** + * The prefix on all system properties in the repository. Anything prefixed + * with this is a system proper anything not prefixed with this is not a + * system property. + */ + public static final String SYSTEM_PROP_PREFIX = "_"; + /** * Login with a user name and password * @@ -39,6 +47,7 @@ public interface Repository { * If there was a problem with the storage pool. * @throws AccessDeniedException * If the user was denied access. + * @since 1.0 */ Session login(String username, String password) throws ClientPoolException, StorageClientException, AccessDeniedException; @@ -53,6 +62,7 @@ Session login(String username, String password) throws ClientPoolException, * If there was a problem with the storage pool. * @throws AccessDeniedException * If the anon was denied access. + * @since 1.0 */ Session login() throws ClientPoolException, StorageClientException, AccessDeniedException; @@ -66,6 +76,7 @@ Session login(String username, String password) throws ClientPoolException, * If there was a problem with the storage pool. * @throws AccessDeniedException * If admin was denied access. 
+ * @since 1.0 */ Session loginAdministrative() throws ClientPoolException, StorageClientException, AccessDeniedException; @@ -82,8 +93,24 @@ Session loginAdministrative() throws ClientPoolException, StorageClientException * If there was a problem with the storage pool. * @throws AccessDeniedException * If the user was denied access. + * @since 1.0 */ Session loginAdministrative(String username) throws ClientPoolException, StorageClientException, AccessDeniedException; + /** + * Perform an administrative login bypassing login enabled checks. Only + * internal system operations should use this. Anything related to a login + * should never use it. + * + * @param username + * @return + * @throws StorageClientException + * @throws ClientPoolException + * @throws AccessDeniedException + * @since 1.4 + */ + Session loginAdministrativeBypassEnable(String username) throws StorageClientException, + ClientPoolException, AccessDeniedException; + } diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/Session.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/Session.java similarity index 84% rename from src/main/java/org/sakaiproject/nakamura/api/lite/Session.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/Session.java index ce48646c..963b0a32 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/Session.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/Session.java @@ -21,6 +21,7 @@ import org.sakaiproject.nakamura.api.lite.accesscontrol.Authenticator; import org.sakaiproject.nakamura.api.lite.authorizable.AuthorizableManager; import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import org.sakaiproject.nakamura.api.lite.lock.LockManager; /** * A lightweight container bound to the user that will maintain state associated @@ -56,6 +57,9 @@ public interface Session { * @throws StorageClientException */ ContentManager getContentManager() throws StorageClientException; + + + LockManager getLockManager() 
throws StorageClientException; /** * @return the userID that this session is bound to. @@ -66,4 +70,16 @@ public interface Session { Repository getRepository(); + /** + * Perform a commit on any pending operations. + */ + void commit(); + + /** + * Add a commit handler for a certain key. Will replace any other commit handler of the same key. + * @param key + * @param commitHandler + */ + void addCommitHandler(String key, CommitHandler commitHandler); + } diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/CacheHolder.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/SessionAdaptable.java similarity index 78% rename from src/main/java/org/sakaiproject/nakamura/api/lite/CacheHolder.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/SessionAdaptable.java index df59b8b5..b4108634 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/CacheHolder.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/SessionAdaptable.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Sakai Foundation (SF) under one * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information @@ -17,18 +17,8 @@ */ package org.sakaiproject.nakamura.api.lite; -import java.util.Map; - -public class CacheHolder { - - private Map o; - - public CacheHolder(Map o) { - this.o = o; - } - - public Map get() { - return o; - } +public interface SessionAdaptable { + + Session getSession(); } diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/SparseSessionTracker.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/SparseSessionTracker.java new file mode 100644 index 00000000..0a329e8d --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/SparseSessionTracker.java @@ -0,0 +1,37 @@ +package org.sakaiproject.nakamura.api.lite; + +import javax.servlet.http.HttpServletRequest; + +/** + * Tracks sessions and provides a mechanism to retrieve them, based on request + * or thread. Retrieval on thread should only be used where it is known that the + * underlying request processing model will be thread based. If event based + * processing is being used, no assumption about thread should be made. + * + * @author ieb + * + */ +public interface SparseSessionTracker { + + /** + * Register a session against a request. + * + * @param login + * the session to be registered + * @param request + * the request to register against, if null, registration will be + * performed on the thread and not on the request. + * @return the session just registered. + */ + Session register(Session login, HttpServletRequest request); + + /** + * @param request + * the request to get the session from, if null, the thread will + * be inspected. + * @return the session that was previously registered, or null if no session + * was registered. 
+ */ + Session get(HttpServletRequest request); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageCacheManager.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageCacheManager.java new file mode 100644 index 00000000..d6c845c2 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageCacheManager.java @@ -0,0 +1,64 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite; + +import java.util.Map; + +/** + * Provides Cache implementations for all the three areas represented as Maps. + * If an implementation of this interface is present it will be used. + */ +public interface StorageCacheManager { + + /** + * @return a Cache, implementing the Map interface, although the keys wont + * clash should be a separate memory space from the other caches to + * prevent memory poisoning. It would be theoretically possible to + * generate a cache ID for some content that could be shared in the + * authorizable or access control space, however thats finding a key + * matching a pattern that collides with a specific pattern after + * both have been hashed with SHA1. 
The probability or random + * collision in SHA1 is 1 in 1E14, so generating a collision for 2 + * string matching specific patterns is probably far greater than + * that. + */ + Map getAccessControlCache(); + + /** + * @return Should be a separate cache, not sharing the same memory space as + * others, see above for why. + */ + Map getAuthorizableCache(); + + /** + * @return Should be a separate cache, not sharing the same memory space as + * others, see above for why. + */ + Map getContentCache(); + + + /** + * Get a named cache. + * @param cacheName + * @return + */ + Map getCache(String cacheName); + + + +} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientException.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientException.java similarity index 100% rename from src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientException.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientException.java diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientUtils.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientUtils.java similarity index 70% rename from src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientUtils.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientUtils.java index cf94491b..950b3aea 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientUtils.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageClientUtils.java @@ -20,13 +20,20 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.time.FastDateFormat; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import 
org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; import org.sakaiproject.nakamura.api.lite.util.Type1UUID; +import org.sakaiproject.nakamura.lite.storage.spi.types.Types; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; import java.io.UnsupportedEncodingException; import java.lang.reflect.Method; import java.security.MessageDigest; @@ -34,6 +41,7 @@ import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Calendar; +import java.util.Collections; import java.util.Date; import java.util.Locale; import java.util.Map; @@ -56,11 +64,7 @@ public class StorageClientUtils { * Default hashing algorithm for passwords */ public final static String SECURE_HASH_DIGEST = "SHA-512"; - /** - * Charset for encoding byte data as char - */ - public static final char[] URL_SAFE_ENCODING = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890" - .toCharArray(); + /** * Based on JackRabbit: Jackrabbit uses a subset of 8601 (8601:2000) for * their date times. @@ -68,7 +72,7 @@ public class StorageClientUtils { public static String ISO8601_JCR_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSSZZ"; @SuppressWarnings("unused") private final static FastDateFormat ISO8601_JCR_FORMAT = FastDateFormat.getInstance( - ISO8601_JCR_PATTERN, TimeZone.getTimeZone("UTC"), Locale.ROOT); + ISO8601_JCR_PATTERN, TimeZone.getTimeZone("UTC"), Locale.ENGLISH); private static final Logger LOGGER = LoggerFactory.getLogger(StorageClientUtils.class); @@ -81,6 +85,8 @@ public class StorageClientUtils { * @param object * the storage object * @return a string representation of the storage object. + * @deprecated the application code should convert to a string if necessary, + * this is not required for storage any more. 
*/ @Deprecated public static String toString(Object object) { @@ -123,6 +129,10 @@ public static String getAltField(String field, String streamId) { * @param object * the object to place in store. * @return the Store representation of the object. + * @deprecated Objects do not need to be converted when placing in the + * store, provided they are one of the ones listed in + * {@link Types.ALLTYPES}. If they are not your code should + * convert to one or more of those types. */ @Deprecated public static Object toStore(Object object) { @@ -135,6 +145,9 @@ public static Object toStore(Object object) { * @param value * the store object * @return a byte[] of the store object. + * @deprecated if its a byte[] just use it as a byte[] otherwise convert to + * a byte[] before storing. eg + * String.valueOf(value).getBytes("UTF-8") */ @Deprecated public static byte[] toBytes(Object value) { @@ -167,7 +180,7 @@ public static String getParentObjectPath(String objectPath) { return "/"; } int i = objectPath.lastIndexOf('/'); - if (i == objectPath.length() - 1) { + while (i == objectPath.length() - 1) { i = objectPath.substring(0, i).lastIndexOf('/'); } String res = objectPath; @@ -185,6 +198,9 @@ public static String getParentObjectPath(String objectPath) { * element in the path. */ public static String getObjectName(String objectPath) { + if ( objectPath == null || "".equals(objectPath)) { + return ""; + } if ("/".equals(objectPath)) { return "/"; } @@ -209,6 +225,15 @@ public static String getObjectName(String objectPath) { */ // TODO: Unit test public static String insecureHash(String naked) { + try { + return insecureHash(naked.getBytes(UTF8)); + } catch (UnsupportedEncodingException e3) { + LOGGER.error("no UTF-8 Envoding, get a real JVM, nothing will work here. 
NPE to come"); + return null; + } + } + + public static String insecureHash(byte[] b) { try { MessageDigest md; try { @@ -219,12 +244,11 @@ public static String insecureHash(String naked) { } catch (NoSuchAlgorithmException e2) { LOGGER.error("You have no Message Digest Algorightms intalled in this JVM, secure Hashes are not availalbe, encoding bytes :" + e2.getMessage()); - return encode(StringUtils.leftPad(naked, 10, '_').getBytes(UTF8), - URL_SAFE_ENCODING); + return encode(StringUtils.leftPad((new String(b,"UTF-8")), 10, '_').getBytes(UTF8)); } } - byte[] bytes = md.digest(naked.getBytes(UTF8)); - return encode(bytes, URL_SAFE_ENCODING); + byte[] bytes = md.digest(b); + return encode(bytes); } catch (UnsupportedEncodingException e3) { LOGGER.error("no UTF-8 Envoding, get a real JVM, nothing will work here. NPE to come"); return null; @@ -250,13 +274,12 @@ public static String secureHash(String password) { } catch (NoSuchAlgorithmException e2) { LOGGER.error("You have no Message Digest Algorightms intalled in this JVM, secure Hashes are not availalbe, encoding bytes :" + e2.getMessage()); - return encode(StringUtils.leftPad(password, 10, '_').getBytes(UTF8), - URL_SAFE_ENCODING); + return encode(StringUtils.leftPad(password, 10, '_').getBytes(UTF8)); } } } byte[] bytes = md.digest(password.getBytes(UTF8)); - return encode(bytes, URL_SAFE_ENCODING); + return encode(bytes); } catch (UnsupportedEncodingException e3) { LOGGER.error("no UTF-8 Envoding, get a real JVM, nothing will work here. NPE to come"); return null; @@ -273,46 +296,31 @@ public static String secureHash(String password) { * the shorter it is the longer the result. Dont be dumb and use * an encoding size of < 2. 
* @return + * @deprecated use encode(byte[]) */ + @Deprecated public static String encode(byte[] hash, char[] encode) { - StringBuilder sb = new StringBuilder((hash.length * 15) / 10); - int x = (int) (hash[0] + 128); - int xt = 0; - int i = 0; - while (i < hash.length) { - if (x < encode.length) { - i++; - if (i < hash.length) { - if (x == 0) { - x = (int) (hash[i] + 128); - } else { - x = (x + 1) * (int) (hash[i] + 128); - } - } else { - sb.append(encode[x]); - break; - } - } - xt = x % encode.length; - x = x / encode.length; - sb.append(encode[xt]); - } - - return sb.toString(); + return encode(hash); + } + + public static String encode(byte[] hash) { + return Base64.encodeBase64URLSafeString(hash); } /** * Converts to an Immutable map, with keys that are in the filter not - * transdered. Nested maps are also transfered. + * transfered. Nested maps are also transfered. * - * @param - * @param - * @param source - * @param filter - * @return + * @param the type of the key + * @param the type of the value + * @param source a map of values to start with + * @param modified a map to oveeride values in source + * @param include if not null, only include these keys in the returned map + * @param exclude if not null, exclude these keys from the returned map + * @return a map with the modifications applied and filtered by the includes and excludes */ @SuppressWarnings("unchecked") - public static Map getFilterMap(Map source, Map modified, Set include, Set exclude) { + public static Map getFilterMap(Map source, Map modified, Set include, Set exclude, boolean includingRemoveProperties ) { if ((modified == null || modified.size() == 0) && (include == null) && ( exclude == null || exclude.size() == 0)) { if ( source instanceof ImmutableMap ) { return source; @@ -320,6 +328,7 @@ public static Map getFilterMap(Map source, Map modified return ImmutableMap.copyOf(source); } } + Builder filteredMap = new ImmutableMap.Builder(); for (Entry e : source.entrySet()) { K k = 
e.getKey(); @@ -329,7 +338,9 @@ public static Map getFilterMap(Map source, Map modified V o = modified.get(k); if (o instanceof Map) { filteredMap.put(k, - (V) getFilterMap((Map) o, null, null, exclude)); + (V) getFilterMap((Map) o, null, null, exclude, includingRemoveProperties)); + } else if ( includingRemoveProperties ) { + filteredMap.put(k, o); } else if ( !(o instanceof RemoveProperty) ) { filteredMap.put(k, o); } @@ -337,7 +348,7 @@ public static Map getFilterMap(Map source, Map modified Object o = e.getValue(); if (o instanceof Map) { filteredMap.put(k, - (V) getFilterMap((Map) e.getValue(), null, null, exclude)); + (V) getFilterMap((Map) e.getValue(), null, null, exclude, includingRemoveProperties)); } else { filteredMap.put(k, e.getValue()); } @@ -365,9 +376,9 @@ public static Map getFilterMap(Map source, Map modified * over depth of nesting. Keys in the filter set are not transfered * Resulting map is mutable. * - * @param source - * @param filter - * @return + * @param source a map of values to modify + * @param filter a map of values to remove by key from source + * @return the map less any keys from filter */ @SuppressWarnings("unchecked") public static Map getFilteredAndEcodedMap(Map source, @@ -391,7 +402,7 @@ public static Map getFilteredAndEcodedMap(Map so * @return a UUID, compact encoded, suitable for use in URLs */ public static String getUuid() { - return StorageClientUtils.encode(Type1UUID.next(), StorageClientUtils.URL_SAFE_ENCODING); + return StorageClientUtils.encode(Type1UUID.next()); } /** @@ -424,6 +435,7 @@ public static long toLong(Object object) { * @param object * @return the store object as a {@link Calendar} * @throws ParseException + * @deprecated no need to convert, just get the calendar object directly out of the store. 
*/ @Deprecated public static Calendar toCalendar(Object object) throws ParseException { @@ -432,7 +444,7 @@ public static Calendar toCalendar(Object object) throws ParseException { } else if (object == null || object instanceof RemoveProperty) { return null; } - final SimpleDateFormat sdf = new SimpleDateFormat(ISO8601_JCR_PATTERN, Locale.ROOT); + final SimpleDateFormat sdf = new SimpleDateFormat(ISO8601_JCR_PATTERN, Locale.ENGLISH); final Date date = sdf.parse(toString(object)); final Calendar c = Calendar.getInstance(); c.setTime(date); @@ -469,7 +481,22 @@ public static String newPath(String path, String child) { */ @SuppressWarnings("unchecked") public static T getSetting(Object setting, T defaultValue) { - if (setting != null) { + if (setting != null && defaultValue != null) { + if (defaultValue.getClass().isAssignableFrom(setting.getClass())) { + return (T) setting; + } + // handle conversions + if ( defaultValue instanceof Long ) { + return (T) new Long(String.valueOf(setting)); + } else if ( defaultValue instanceof Integer ) { + return (T) new Integer(String.valueOf(setting)); + } else if (defaultValue instanceof Boolean ) { + return (T) Boolean.valueOf(String.valueOf(setting)); + } else if ( defaultValue instanceof Double ) { + return (T) new Double(String.valueOf(setting)); + } else if ( defaultValue instanceof String[] ) { + return (T) StringUtils.split(String.valueOf(setting), ','); + } return (T) setting; } return defaultValue; @@ -516,8 +543,8 @@ public static String arrayUnEscape(String string) { /** * @param object * @return null or the store object converted to a string[] + * @deprecated no need to convert, just get the String[] object directly out of the store. 
*/ - // TODO: Unit test @Deprecated public static String[] toStringArray(Object object) { if ( object instanceof String[] ) { @@ -535,10 +562,10 @@ public static String[] toStringArray(Object object) { /** * @param object - * @return null or the store object converted to a string[] + * @return null or the store object converted to a Calendar[] * @throws ParseException + * @deprecated no need to convert, just get the Calendar[] object directly out of the store. */ - // TODO: Unit test @Deprecated public static Calendar[] toCalendarArray(Object object) throws ParseException { if ( object instanceof Calendar[] ) { @@ -592,6 +619,14 @@ public static Session adaptToSession(Object source) { } } + /** + * Make the method on the target object accessible and then invoke it. + * @param target the object with the method to invoke + * @param methodName the name of the method to invoke + * @param args the arguments to pass to the invoked method + * @param argsTypes the types of the arguments being passed to the method + * @return + */ private static Object safeMethod(Object target, String methodName, Object[] args, @SuppressWarnings("rawtypes") Class[] argsTypes) { if (target != null) { @@ -608,9 +643,106 @@ private static Object safeMethod(Object target, String methodName, Object[] args return null; } + /** + * @param property + * @return + * @deprecated no need to convert, just get the Boolean object directly out of the store. + */ @Deprecated public static boolean toBoolean(Object property) { return "true".equals(StorageClientUtils.toString(property)); } + /** + * Delete an entire tree starting from the deepest part of the tree and + * working back up. Will stop the moment a permission denied is encountered + * either for read or for delete. 
+ * + * @param contentManager + * @param path + * @throws AccessDeniedException + * @throws StorageClientException + */ + public static void deleteTree(ContentManager contentManager, String path) + throws AccessDeniedException, StorageClientException { + Content content = contentManager.get(path); + if (content != null) { + for (String childPath : content.listChildPaths()) { + deleteTree(contentManager, childPath); + } + } + contentManager.delete(path); + } + + public static String getInternalUuid() { + return getUuid()+"+"; // URL safe base 64 does not use + chars + } + + public static void copyTree(ContentManager contentManager, String sourcePath, String destPath, + boolean withStreams) throws StorageClientException, AccessDeniedException, IOException { + contentManager.copy(sourcePath, destPath, withStreams); + LOGGER.info("Copied {} to {} ", sourcePath, destPath ); + Content content = contentManager.get(sourcePath); + if (content != null) { + for (String childPath : content.listChildPaths()) { + String name = StorageClientUtils.getObjectName(childPath); + String childSourcePath = StorageClientUtils.newPath(sourcePath, name); + String childDestPath = StorageClientUtils.newPath(destPath, name); + copyTree(contentManager, childSourcePath, childDestPath, withStreams); + } + } + } + + public static void dumpTree(Content content) { + if ( content != null ) { + LOGGER.info("Path {} ",content.getPath()); + for ( Content child : content.listChildren() ) { + dumpTree(child); + } + } + } + + /** + * Perform a set difference between the properties of content0 and + * content1. The inputs are not modified. + * + * @param content0 + * @param content1 + * @return A {@link Set} of {@link String} that are the keys of properties that are in + * content0 but not in content1. 
+ */ + public static Set diffProps(Content content0, Content content1) { + if (content0 == null) { + return Collections.emptySet(); + } + if (content1 == null) { + return content0.getProperties().keySet(); + } + + return diffKeys(content0.getProperties(), content1.getProperties()); + } + + /** + * Perform a set difference between the keys of keys0 and + * keys1. The inputs are not modified. + * + * @param keys0 + * @param key1 + * @return A {@link Set} of {@link String} that are the keys that are in + * content0 but not in content1. + */ + public static Set diffKeys(Map keys0, Map keys1) { + if (keys1 == null || keys1.size() == 0) { + return keys0.keySet(); + } + if (keys0 == null || keys0.size() == 0) { + return Collections.emptySet(); + } + + // collect the keys that exist + Set retKeys = Sets.newHashSet(keys0.keySet()); + // remove any keys that we're updating which leaves the keys we should remove + retKeys.removeAll(keys1.keySet()); + return retKeys; + } } diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageConstants.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageConstants.java new file mode 100644 index 00000000..542de652 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StorageConstants.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite; + +public class StorageConstants { + + /** + * Property used to select a set of query statements in the finder. These must exist in + * the driver configuration and are intended to allow institutions to optimize certain + * queries. If not present, the default set will be used. + */ + public static final String CUSTOM_STATEMENT_SET = "_statementset"; + + /** + * Property used to set the maximum number of items a query should return per page. + * The starting row of the query is determined by the page number. + * Defaults to 25. + */ + public static final String ITEMS = "_items"; + + + /** + * Page number to start at, defaults to 0. + */ + public static final String PAGE = "_page"; + + /** + * The column on which to perform a sort. + */ + public static final String SORT = "_sort"; + + /** + * If present, raw results will be returned as string values for each record. + */ + public static final String RAWRESULTS = "_rawresults"; + + /** + * If true, then the query cache may be used. Key-value pairs of the query must uniquely identify the query, and + * cache must be cleared by other means. This is a big ask and requires effort. + */ + public static final String CACHEABLE = "_cacheable"; + + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/StoreListener.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StoreListener.java new file mode 100644 index 00000000..354a1501 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/StoreListener.java @@ -0,0 +1,91 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite; + +import java.util.Map; + +/** + * The StoreListener is notified when actions are performed on objects in storage. + */ +public interface StoreListener { + public static final String TOPIC_BASE = "org/sakaiproject/nakamura/lite/"; + public static final String DELETE_TOPIC = "DELETE"; + public static final String ADDED_TOPIC = "ADDED"; + public static final String UPDATED_TOPIC = "UPDATED"; + public static final String DEFAULT_DELETE_TOPIC = TOPIC_BASE + DELETE_TOPIC; + public static final String DEFAULT_CREATE_TOPIC = TOPIC_BASE + ADDED_TOPIC; + public static final String DEFAULT_UPDATE_TOPIC = TOPIC_BASE + UPDATED_TOPIC; + public static final String[] DEFAULT_TOPICS = new String[] { DEFAULT_CREATE_TOPIC, + DEFAULT_UPDATE_TOPIC, DEFAULT_DELETE_TOPIC, + TOPIC_BASE + "authorizables/" + DELETE_TOPIC, + TOPIC_BASE + "groups/" + DELETE_TOPIC, + TOPIC_BASE + "users/" + DELETE_TOPIC, + TOPIC_BASE + "admin/" + DELETE_TOPIC, + TOPIC_BASE + "content/" + DELETE_TOPIC, + TOPIC_BASE + "authorizables/"+ADDED_TOPIC, + TOPIC_BASE + "groups/"+ADDED_TOPIC, + TOPIC_BASE + "users/"+ADDED_TOPIC, + TOPIC_BASE + "admin/"+ADDED_TOPIC, + TOPIC_BASE + "content/"+ADDED_TOPIC, + TOPIC_BASE + "authorizables/"+UPDATED_TOPIC, + TOPIC_BASE + "groups/"+UPDATED_TOPIC, + TOPIC_BASE + "users/"+UPDATED_TOPIC, + TOPIC_BASE + "admin/"+UPDATED_TOPIC, + TOPIC_BASE + 
"content/"+UPDATED_TOPIC }; + public static final String USERID_PROPERTY = "userid"; + public static final String PATH_PROPERTY = "path"; + public static final String RESOURCE_TYPE_PROPERTY = "resourceType"; + public static final String BEFORE_EVENT_PROPERTY = "_beforeEvent"; + + /** + * onDelete is called after an object has been deleted. + * @param zone an identifier for the type of object being acted upon + * @param path the path to the object + * @param user the user logged in causing this action + * @param resourceType the resource type of the item, if known. + * @param beforeEvent the properties of the object before it was deleted + * @param attributes properties of the event itself + */ + void onDelete(String zone, String path, String user, String resourceType, Map beforeEvent, String... attributes); + + /** + * onUpdate is called after an object has been updated. + * @param zone an identifier for the type of object being acted upon + * @param path the path to the object + * @param user the user logged in causing this action + * @param resourceType the resource type of the item, if known. + * @param beforeEvent the properties of the object before it was updated + * @param attributes properties of the event itself + */ + void onUpdate(String zone, String path, String user, String resourceType, boolean isNew, Map beforeEvent, String... 
attributes); + + /** + * onLogin is called when a user logs in and creates a new {@link Session} + * @param userid + * @param sessionID + */ + void onLogin(String userid, String sessionID); + + /** + * onLogout is called when a user logs out of their {@link Session} + * @param userid + * @param sessionID + */ + void onLogout(String userid, String sessionID); + +} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessControlManager.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessControlManager.java similarity index 70% rename from src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessControlManager.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessControlManager.java index 50c4de96..76c6c15b 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessControlManager.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessControlManager.java @@ -19,6 +19,8 @@ import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.lite.accesscontrol.PropertyAcl; import java.util.Map; @@ -27,6 +29,20 @@ */ public interface AccessControlManager { + /** + * Dynamic ACEs keys start with this value. Everything after the _tp_ is + * interpreted by the PrincipalTokenResolver to load a Content item + * containing the principal data. The content item must validate against the + * ACL item to be used. + */ + public static final String DYNAMIC_PRINCIPAL_STEM = "_tp_"; + + /** + * Property ACEs start with this value. Property ACEs have the form + * _pp_@@ + */ + public static final String PROPERTY_PRINCIPAL_STEM = "_pp_"; + /** * Get an ACL at an object of a defined type. 
Do not look at parent objects * @@ -136,4 +152,47 @@ boolean can(Authorizable authorizable, String objectType, String objectPath, String[] findPrincipals(String objectType, String objectPath, int permission, boolean granted) throws StorageClientException; + /** + * Bind a PrincipalTokenResolver to the Access Manager request. + * @param principalTokenResolver the principal resolver to use with this acl request. + */ + void setRequestPrincipalResolver(PrincipalTokenResolver principalTokenResolver); + + + /** + * Unbind a PrincipalTokenResolver from the Access Manager. + */ + void clearRequestPrincipalResolver(); + + + /** + * This methods signs a token with the shared Key of the objectPath Content + * ACL and modifies the token properties with the signature. (using a HMAC + * based signature). It is the responsibility of the calling code to save + * the modified token. + * + * @param token + * the token to be signed + * @param objectType + * the type of the ACL path. + * @param objectPath + * the ACL path to use for signing + * @throws StorageClientException + * @throws AccessDeniedException + */ + void signContentToken(Content token, String objectType, String objectPath) throws StorageClientException, + AccessDeniedException; + + + /** + * Get the property ACL applicable to the current user on the specified path. 
+ * @param objectType the type of the object + * @param objectPath the path to the object + * @return + * @throws AccessDeniedException + * @throws StorageClientException + */ + PropertyAcl getPropertyAcl(String objectType, String objectPath) throws AccessDeniedException, StorageClientException; + + } diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessDeniedException.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessDeniedException.java similarity index 100% rename from src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessDeniedException.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AccessDeniedException.java diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModification.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModification.java similarity index 80% rename from src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModification.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModification.java index 4803c99b..1a69c8ae 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModification.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModification.java @@ -1,3 +1,20 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ package org.sakaiproject.nakamura.api.lite.accesscontrol; import com.google.common.collect.Lists; @@ -6,6 +23,7 @@ import java.util.Map; import java.util.Map.Entry; + /** * Specification of a modification to be applied to an ACL. */ @@ -186,7 +204,13 @@ public static String getPrincipal(String principalKey) { if (principalKey.length() <= GRANTED_MARKER.length()) { return null; } - return principalKey.substring(0, principalKey.length()-GRANTED_MARKER.length()); + if ( principalKey.endsWith(GRANTED_MARKER) ) { + return principalKey.substring(0, principalKey.length()-GRANTED_MARKER.length()); + } else if ( principalKey.endsWith(DENIED_MARKER) ) { + return principalKey.substring(0, principalKey.length()-DENIED_MARKER.length()); + } else { + return null; + } } @@ -201,4 +225,8 @@ public static Permission[] listPermissions(int perms) { return permissions.toArray(new Permission[permissions.size()]); } + public static String getPropertyKey(String id, String propertyName) { + return AccessControlManager.PROPERTY_PRINCIPAL_STEM+id+"@"+propertyName; + } + } diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Authenticator.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Authenticator.java similarity index 82% rename from src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Authenticator.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Authenticator.java index b1ff6fcf..fa8f412d 100644 --- 
a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Authenticator.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Authenticator.java @@ -21,6 +21,7 @@ /** * Authenticates a user + * @since 1.0 */ public interface Authenticator { @@ -33,14 +34,24 @@ public interface Authenticator { * password for the user * @return the user object for the user or null if the authentication * attempt is not valid. + * @since 1.0 */ User authenticate(String userid, String password); /** - * perform a system authentiation, trusting the userId. + * perform a system authentication, trusting the userId. * @param userid * @return the User object if the userID exists. + * @since 1.0 */ User systemAuthenticate(String userid); + /** + * perform a system authentication bypassing enable login checks + * @param userid + * @return + * @since 1.4 + */ + User systemAuthenticateBypassEnable(String userid); + } diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permission.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permission.java similarity index 100% rename from src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permission.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permission.java diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permissions.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permissions.java similarity index 91% rename from src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permissions.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permissions.java index d14990b1..d86629b6 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permissions.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Permissions.java @@ -31,6 +31,9 @@ public class Permissions { public static final Permission 
CAN_WRITE = new Permission(0x0002, "Write"); public static final Permission CAN_DELETE = new Permission(0x0004, "Delete"); public static final Permission CAN_ANYTHING = CAN_READ.combine(CAN_WRITE).combine(CAN_DELETE); + public static final Permission CAN_READ_PROPERTY = new Permission(0x0010, "Read Property"); + public static final Permission CAN_WRITE_PROPERTY = new Permission(0x0020, "Write Property"); + public static final Permission CAN_ANYTHING_PROPERTY = CAN_READ_PROPERTY.combine(CAN_WRITE_PROPERTY); public static final Permission CAN_READ_ACL = new Permission(0x1000, "Read ACL"); public static final Permission CAN_WRITE_ACL = new Permission(0x2000, "Write ACL"); public static final Permission CAN_DELETE_ACL = new Permission(0x4000, "Delete ACL"); @@ -68,12 +71,14 @@ private static Map createConvertToSparsePermissions() { b.put("write", Permissions.CAN_WRITE); b.put("delete", Permissions.CAN_DELETE); b.put("view", Permissions.CAN_READ); - b.put("manage", Permissions.CAN_MANAGE); - b.put("all", Permissions.ALL); + b.put("anything", Permissions.CAN_ANYTHING); b.put("read-acl", Permissions.CAN_READ_ACL); b.put("write-acl", Permissions.CAN_WRITE_ACL); b.put("delete-acl", Permissions.CAN_DELETE_ACL); b.put("manage-acl", Permissions.CAN_ANYTHING_ACL); + b.put("anything-acl", Permissions.CAN_ANYTHING_ACL); + b.put("manage", Permissions.CAN_MANAGE); + b.put("all", Permissions.ALL); return b.build(); } diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalTokenResolver.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalTokenResolver.java new file mode 100644 index 00000000..509db16f --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalTokenResolver.java @@ -0,0 +1,40 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite.accesscontrol; + +import org.sakaiproject.nakamura.api.lite.content.Content; + +import java.util.List; + +/** + * Resolves proxyPrincipals to tokens. An implementation of this will be + * provided by the caller if principal tokens are to be resolved. This + * implementation should bind to the user in question. + */ +public interface PrincipalTokenResolver { + + /** + * Resolve the principal. + * + * @param principal + * @return the tokens associated with the proxyPrincipal, could be more than + * one. + */ + List resolveTokens(String principal); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalValidatorPlugin.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalValidatorPlugin.java new file mode 100644 index 00000000..5e927bbb --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalValidatorPlugin.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite.accesscontrol; + +import org.sakaiproject.nakamura.api.lite.content.Content; + +/** + * Validates a principal Token. + */ +public interface PrincipalValidatorPlugin { + + /** + * Validate the token to see if its current. This should not need to consider + * the user since if the user is relevant they will have access to the token, + * if not, the token would not have been resolved for the user. + * + * @param proxyPrincipalToken + * @return true if the principal is valid, and the user who resolved it can + * have the principal. + */ + boolean validate(Content proxyPrincipalToken); + + /** + * @return a list of fields that must be protected, these are incorporated + * into the hmac to ensure no tampering. + */ + String[] getProtectedFields(); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalValidatorResolver.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalValidatorResolver.java new file mode 100644 index 00000000..a4f34df9 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/PrincipalValidatorResolver.java @@ -0,0 +1,49 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite.accesscontrol; + +/** + * Resolves a Key to a PrincipalValidatorPlugin, and provides a location for + * Plugins to register. Plugins should depend on this service so they can + * register. + */ +public interface PrincipalValidatorResolver { + + /** + * @param key + * the name of the plugin + * @return the plugin, or null if not found. + */ + PrincipalValidatorPlugin getPluginByName(String key); + + /** + * Register a plugin. + * + * @param key + * @param plugin + */ + void registerPlugin(String key, PrincipalValidatorPlugin plugin); + + /** + * De-register a plugin. 
+ * + * @param key + */ + void unregisterPlugin(String key); + +} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Security.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Security.java similarity index 100% rename from src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Security.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/accesscontrol/Security.java diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Authorizable.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Authorizable.java new file mode 100644 index 00000000..15ba217d --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Authorizable.java @@ -0,0 +1,490 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.api.lite.authorizable; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang.StringUtils; +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessControlManager; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; +import org.sakaiproject.nakamura.api.lite.util.Iterables; +import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlledMap; +import org.sakaiproject.nakamura.lite.accesscontrol.PropertyAcl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; + +/** + * Base Authorizable object. + */ +public class Authorizable { + + public static final String PASSWORD_FIELD = "pwd"; + + /** + * List of principals that this Authorizable has. + */ + public static final String PRINCIPALS_FIELD = "principals"; + + /** + * List of members that are members of this authorizable. + */ + public static final String MEMBERS_FIELD = "members"; + + /** + * The ID of the authorizable. + */ + public static final String ID_FIELD = "id"; + + /** + * The name of the authorizable. + */ + public static final String NAME_FIELD = "name"; + + /** + * The type of the authorizable, either g or u (Group or User) + */ + public static final String AUTHORIZABLE_TYPE_FIELD = "type"; + + /** + * The type value indicating a group. 
+ */ + public static final String GROUP_VALUE = "g"; + /** + * The type value indicating a user. + */ + public static final String USER_VALUE = "u"; + + /** + * The name of the administrators group, members of which are granted access + * to certain functions. + */ + public static final String ADMINISTRATORS_GROUP = "administrators"; + + /** + * The time (epoch long) the authorizable was modified. + */ + public static final String LASTMODIFIED_FIELD = "lastModified"; + /** + * The ID of the authorizable that last modified this authorizable. + */ + public static final String LASTMODIFIED_BY_FIELD = "lastModifiedBy"; + /** + * The time (epoch long) when the authorizable was created. + */ + public static final String CREATED_FIELD = "created"; + /** + * The ID of the authorizable that created this authorizable. + */ + public static final String CREATED_BY_FIELD = "createdBy"; + + /** + * If the field is set, then it defines the period during which the user + * may login. The field holds up to 2 ISO8601 formatted dates, defining the start + * and end periods. If the value starts with a , eg ,2011-12-10 then the + * period is assumed to end on the date provided. If it ends with a , eg + * 2011-12-10, then the period starts on the date provided. If the date + * contains no time, the period is for the day in the timezone of the server + * time. If the period contains a time then it is precise. + */ + public static final String LOGIN_ENABLED_PERIOD_FIELD = "loginEnabledPeriod"; + + + /** + * A set of properties to filter out when sending out and setting. + */ + private static final Set FILTER_PROPERTIES = ImmutableSet.of(PASSWORD_FIELD, ID_FIELD); + + /** + * A set of properties that are not visible. + */ + private static final Set PRIVATE_PROPERTIES = ImmutableSet.of(PASSWORD_FIELD); + + /** + * no password value. 
+ */ + public static final String NO_PASSWORD = "--none--"; + + protected static final Logger LOGGER = LoggerFactory.getLogger(Authorizable.class); + + private static final Set IMMUTABLE_AUTH_IDS = ImmutableSet.of(Group.EVERYONE); + + /** + * A read only copy of the map, protected by an Immutable Wrapper + */ + protected Map authorizableMap; + /** + * A set of principals that this Authorizable has. + */ + protected Set principals; + + /** + * The ID of this authorizable. + */ + protected String id; + + /** + * Modifications to the map. + */ + protected Map modifiedMap; + /** + * true if the principals have been modified. + */ + protected boolean principalsModified; + + /** + * true if the object is new. + */ + private boolean isObjectNew = true; + + /** + * true if the object is read only. + */ + protected boolean readOnly; + + private boolean immutable; + + /** + * The Acl at load time for properties on this authorizable. + */ + private PropertyAcl propertyAcl; + + public Authorizable(Map autorizableMap) throws StorageClientException, AccessDeniedException { + this(autorizableMap, null); + } + public Authorizable(Map authorizableMap, Session session) throws StorageClientException, AccessDeniedException { + principalsModified = false; + this.id = (String) authorizableMap.get(ID_FIELD); + if (id == null || id.charAt(0) == '_') { + LOGGER.warn("Authorizables cant be null or start with _ this {} will cause problems ", id); + } + if ( session != null && !User.ADMIN_USER.equals(session.getUserId()) ) { + AccessControlManager accessControlManager = session.getAccessControlManager(); + propertyAcl = accessControlManager.getPropertyAcl(Security.ZONE_AUTHORIZABLES, id ); + } else { + propertyAcl = new PropertyAcl(); + } + modifiedMap = new AccessControlledMap(propertyAcl); + init(authorizableMap, propertyAcl); + } + + private void init(Map newMap, PropertyAcl propertyAcl) { + this.authorizableMap = StorageClientUtils.getFilterMap(newMap, null, null, 
propertyAcl.readDeniedSet(), false); + Object principalsB = authorizableMap.get(PRINCIPALS_FIELD); + if (principalsB == null) { + this.principals = Sets.newLinkedHashSet(); + } else { + this.principals = Sets.newLinkedHashSet(Iterables.of(StringUtils.split( + (String) principalsB, ';'))); + } + if (!User.ANON_USER.equals(this.id)) { + this.principals.add(Group.EVERYONE); + } + } + + /** + * @param newMap + * the new map to reset the authorizable to. + */ + public void reset(Map newMap) { + if (!readOnly) { + principalsModified = false; + modifiedMap.clear(); + init(newMap, propertyAcl); + + LOGGER.debug("After Update to Authorizable {} ", authorizableMap); + } + } + + /** + * @return an array of principals that the authorizable has, indicating the + * groups that the authorizable is a member of and any other + * principals that have been granted to this authorizable. + * Principals are generally used in access control lists and are not + * limited to group ids. + */ + public String[] getPrincipals() { + return principals.toArray(new String[principals.size()]); + } + + /** + * @return the ID of this authorizable (immutable) + */ + public String getId() { + return id; + } + + // TODO: Unit test + /** + * @return get the current set of safe properties that can be updated, taking into account any modifications. + */ + public Map getSafeProperties() { + if (!readOnly && principalsModified) { + modifiedMap.put(PRINCIPALS_FIELD, StringUtils.join(principals, ';')); + } + return StorageClientUtils.getFilterMap(authorizableMap, modifiedMap, null, + FILTER_PROPERTIES, false); + } + + /** + * Returns the properties of the authorizable taking into account any modifications. This includes fields that could be modified. 
+ * @return + */ + public Map getProperties() { + if (!readOnly && principalsModified) { + modifiedMap.put(PRINCIPALS_FIELD, StringUtils.join(principals, ';')); + } + return StorageClientUtils.getFilterMap(authorizableMap, modifiedMap, null, + PRIVATE_PROPERTIES, false); + } + + /** + * @return true if this authorizable is a group. + */ + public boolean isGroup() { + return false; + } + + /** + * @return get the original properties of this authorizable ignoring any unsaved properties. + */ + public Map getOriginalProperties() { + return StorageClientUtils.getFilterMap(authorizableMap, null, null, FILTER_PROPERTIES, false); + } + + /** + * Set a property. The property will only be set if writable. If the property or this authorizable is read only, nothing will happen. + * @param name the name of the property + * @param value the value of the property. + */ + public void setProperty(String name, Object value) { + if (!readOnly && !FILTER_PROPERTIES.contains(name)) { + Object cv = authorizableMap.get(name); + if ( value == null ) { + if ( cv != null && !(cv instanceof RemoveProperty)) { + modifiedMap.put(name, new RemoveProperty()); + } + } else if (!value.equals(cv)) { + modifiedMap.put(name, value); + } else if (modifiedMap.containsKey(name) && !value.equals(modifiedMap.get(name))) { + modifiedMap.put(name, value); + } + + } + } + + /** + * @param name + * @return the instance of the property. Note that if the property is an array or object it will be mutable. + */ + public Object getProperty(String name) { + if (!PRIVATE_PROPERTIES.contains(name)) { + if (modifiedMap.containsKey(name)) { + Object o = modifiedMap.get(name); + if (o instanceof RemoveProperty) { + return null; + } else { + return o; + } + } + return authorizableMap.get(name); + } + return null; + } + + /** + * remove the property. 
+ * @param name + */ + public void removeProperty(String key) { + if (!readOnly && (authorizableMap.containsKey(key) || modifiedMap.containsKey(key))) { + modifiedMap.put(key, new RemoveProperty()); + } + } + + /** + * add a principal to this authorizable. + * @param principal + */ + public void addPrincipal(String principal) { + if (!readOnly && !principals.contains(principal)) { + principals.add(principal); + principalsModified = true; + } + } + + /** + * remove a principal from this authorizable. + * @param principal + */ + public void removePrincipal(String principal) { + if (!readOnly && principals.contains(principal)) { + principals.remove(principal); + principalsModified = true; + } + } + + /** + * @return a Map of properties that should be saved to storage. This merges the original properties and unsaved changes. + */ + public Map getPropertiesForUpdate() { + if (!readOnly && principalsModified) { + principals.remove(Group.EVERYONE); + modifiedMap.put(PRINCIPALS_FIELD, StringUtils.join(principals, ';')); + principals.add(Group.EVERYONE); + } + return StorageClientUtils.getFilterMap(authorizableMap, modifiedMap, null, + FILTER_PROPERTIES, true); + } + + /** + * @return true if the authorizable is modified. + */ + public boolean isModified() { + return !readOnly && (principalsModified || (modifiedMap.size() > 0)); + } + + /** + * @param name + * @return true if the property is set in the unsaved version of the authorizable. + */ + public boolean hasProperty(String name) { + Object modifiedValue = modifiedMap.get(name); + if (modifiedValue instanceof RemoveProperty) { + return false; + } + if (modifiedValue != null) { + return true; + } + return authorizableMap.containsKey(name); + } + + /** + * @param authorizableManager + * @return an Iterator containing Groups this authorizable is a direct or + * indirect member of.
+ */ + public Iterator memberOf(final AuthorizableManager authorizableManager) { + final List memberIds = new ArrayList(); + Collections.addAll(memberIds, getPrincipals()); + return new PreemptiveIterator() { + + private int p; + private Group group; + + @Override + protected boolean internalHasNext() { + while (p < memberIds.size()) { + String id = memberIds.get(p); + p++; + try { + Authorizable a = authorizableManager.findAuthorizable(id); + if (a instanceof Group) { + group = (Group) a; + for (String pid : a.getPrincipals()) { + if (!memberIds.contains(pid)) { + memberIds.add(pid); + } + } + return true; + } + } catch (AccessDeniedException e) { + LOGGER.debug(e.getMessage(), e); + } catch (StorageClientException e) { + LOGGER.debug(e.getMessage(), e); + } + } + close(); + return false; + } + + @Override + protected Group internalNext() { + return group; + } + + }; + } + + /** + * @param isObjectNew mark the object as new. + */ + protected void setObjectNew(boolean isObjectNew) { + this.isObjectNew = isObjectNew; + } + + /** + * @return true if the object is new. + */ + public boolean isNew() { + return isObjectNew; + } + + /** + * @param readOnly mark the object read only. 
+ */ + protected void setReadOnly(boolean readOnly) { + if (!this.readOnly) { + this.readOnly = readOnly; + } + } + public boolean isReadOnly() { + return readOnly; + } + + @Override + public int hashCode() { + return id.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof Authorizable) { + Authorizable a = (Authorizable) obj; + return id.equals(a.getId()); + } + return super.equals(obj); + } + + /** + * {@inheritDoc} + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return id; + } + + public boolean isImmutable() { + return immutable || IMMUTABLE_AUTH_IDS.contains(id); + } +} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableManager.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableManager.java similarity index 74% rename from src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableManager.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableManager.java index 92ff79d1..38b12df4 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableManager.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableManager.java @@ -49,6 +49,16 @@ Authorizable findAuthorizable(String authorizableId) throws AccessDeniedExceptio void updateAuthorizable(Authorizable authorizable) throws AccessDeniedException, StorageClientException; + /** + * Update an authorizable with the option to not touch the user last modified information. + * @param authorizable the authorizable. + * @param withTouch if false the last modified information will not be changed, but only admin users can perform this. 
+ * @throws AccessDeniedException + * @throws StorageClientException + */ + void updateAuthorizable(Authorizable authorizable, boolean withTouch) + throws AccessDeniedException, StorageClientException; + /** * Create a group * @param authorizableId the group ID @@ -94,6 +104,16 @@ boolean createUser(String userId, String userName, String password, void changePassword(Authorizable authorizable, String password, String oldPassword) throws StorageClientException, AccessDeniedException; + + /** + * Administratively disable a password for the supplied user. Only admin can do this. + * @param authorizable + * @throws StorageClientException + * @throws AccessDeniedException + */ + void disablePassword(Authorizable authorizable) + throws StorageClientException, AccessDeniedException; + /** * Find authorizables by exact property matches * @param propertyName the name of the property @@ -105,4 +125,25 @@ void changePassword(Authorizable authorizable, String password, String oldPasswo Iterator findAuthorizable(String propertyName, String value, Class authorizableType) throws StorageClientException; + /** + * @return the user bound to this authorizable manager. + */ + User getUser(); + + + /** + * @param path cause an event to be emitted for the path that will cause a refresh. + * @throws AccessDeniedException + * @throws StorageClientException + */ + void triggerRefresh(String path) throws StorageClientException, AccessDeniedException; + + + /** + * Cause an event to be emitted for all items. 
+ * @throws StorageClientException + */ + void triggerRefreshAll() throws StorageClientException; + + } diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Group.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Group.java similarity index 65% rename from src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Group.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Group.java index 73fd3512..3d87ec6d 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Group.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Group.java @@ -21,6 +21,9 @@ import com.google.common.collect.Sets; import org.apache.commons.lang.StringUtils; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; import org.sakaiproject.nakamura.api.lite.util.Iterables; import org.sakaiproject.nakamura.lite.authorizable.GroupInternal; @@ -28,7 +31,7 @@ import java.util.Set; /** - * A group has a list of members that is maintaiend in the group. This is + * A group has a list of members that is maintained in the group. This is * reflected as principals in each member, managed by the AuthorizableManager, * only updated on save. * @@ -41,46 +44,71 @@ public class Group extends Authorizable { * The ID of the everyone group. Includes all users except anon. 
*/ public static final String EVERYONE = "everyone"; - public static final Group EVERYONE_GROUP = new GroupInternal(ImmutableMap.of("id",(Object)EVERYONE), false, true); + public static final Group EVERYONE_GROUP = getEveryone(); private Set members; private Set membersAdded; private Set membersRemoved; private boolean membersModified; - public Group(Map groupMap) { - super(groupMap); + public Group(Map groupMap) throws StorageClientException, AccessDeniedException { + this(groupMap, null); + } + + public Group(Map groupMap, Session session) throws StorageClientException, AccessDeniedException { + super(groupMap, session); this.members = Sets.newLinkedHashSet(Iterables.of(StringUtils.split( (String) authorizableMap.get(MEMBERS_FIELD), ';'))); this.membersAdded = Sets.newHashSet(); this.membersRemoved = Sets.newHashSet(); membersModified = true; } - + private static Group getEveryone() { + try { + return new GroupInternal(ImmutableMap.of("id", (Object) EVERYONE), null, false, true); + } catch (StorageClientException e) { + // it cant throw this since the session is null + } catch (AccessDeniedException e) { + // it cant throw this since the session is null + } + return null; + } + + /** + * {@inheritDoc} + */ @Override public boolean isGroup() { return true; } - + /** + * {@inheritDoc} + */ @Override public Map getPropertiesForUpdate() { - if ( !readOnly && membersModified ) { + if (!readOnly && membersModified) { modifiedMap.put(MEMBERS_FIELD, StringUtils.join(members, ';')); } - Map propertiesForUpdate = super.getPropertiesForUpdate(); + Map propertiesForUpdate = super.getPropertiesForUpdate(); return propertiesForUpdate; } - + + /** + * {@inheritDoc} + */ @Override // TODO: Unit test public Map getSafeProperties() { - if ( !readOnly && membersModified ) { + if (!readOnly && membersModified) { modifiedMap.put(MEMBERS_FIELD, StringUtils.join(members, ';')); } return super.getSafeProperties(); } - + + /** + * {@inheritDoc} + */ @Override // TODO: Unit test public 
boolean isModified() { @@ -93,25 +121,27 @@ public String[] getMembers() { public void addMember(String member) { if (!readOnly && !members.contains(member)) { - LOGGER.debug(" {} adding Member {} to {} ",new Object[]{this,member, members}); + LOGGER.debug(" {} adding Member {} to {} ", new Object[] { this, member, members }); members.add(member); membersAdded.add(member); membersRemoved.remove(member); membersModified = true; } else { - LOGGER.debug("{} Member {} already present in {} ",new Object[]{this,member,members}); + LOGGER.debug("{} Member {} already present in {} ", new Object[] { this, member, + members }); } } public void removeMember(String member) { if (!readOnly && members.contains(member)) { - LOGGER.debug(" {} removing Member {} to {} ",new Object[]{this,member, members}); + LOGGER.debug(" {} removing Member {} to {} ", new Object[] { this, member, members }); members.remove(member); membersAdded.remove(member); membersRemoved.add(member); membersModified = true; } else { - LOGGER.debug("{} Member {} already not present in {} ",new Object[]{this,member,members}); + LOGGER.debug("{} Member {} already not present in {} ", new Object[] { this, member, + members }); } } @@ -124,9 +154,9 @@ public String[] getMembersRemoved() { } public void reset(Map newMap) { - if (!readOnly ) { + if (!readOnly) { super.reset(newMap); - LOGGER.debug("{} reset ",new Object[]{this}); + LOGGER.debug("{} reset ", new Object[] { this }); this.members = Sets.newLinkedHashSet(Iterables.of(StringUtils.split( (String) authorizableMap.get(MEMBERS_FIELD), ';'))); membersAdded.clear(); @@ -134,5 +164,15 @@ public void reset(Map newMap) { membersModified = false; } } + + @Override + public boolean equals(Object obj) { + return super.equals(obj); // Group and User shared the same key space. 
+ } + + @Override + public int hashCode() { + return super.hashCode(); + } } diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/User.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/User.java similarity index 51% rename from src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/User.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/User.java index 5ea86995..fbe453e4 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/User.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/User.java @@ -20,15 +20,21 @@ import com.google.common.collect.ImmutableSet; import org.apache.commons.lang.StringUtils; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.util.EnabledPeriod; import java.security.Principal; +import java.util.Calendar; import java.util.Map; import java.util.Set; +import java.util.TimeZone; import javax.security.auth.Subject; /** - * Represetnation of the User. + * Representation of the User. */ public class User extends Authorizable { @@ -41,13 +47,18 @@ public class User extends Authorizable { */ public static final String ANON_USER = "anonymous"; /** - * The ID of teh system user. + * The ID of the system user. 
*/ public static final String SYSTEM_USER = "system"; public static final String IMPERSONATORS_FIELD = "impersonators"; - public User(Map userMap) { - super(userMap); + public User(Map userMap) throws StorageClientException, AccessDeniedException { + this(userMap, null); + } + + public User(Map userMap, Session session) throws StorageClientException, + AccessDeniedException { + super(userMap, session); } /** @@ -74,7 +85,7 @@ public boolean allowImpersonate(Subject impersSubject) { if (impersonators == null) { return false; } - Set impersonatorSet = ImmutableSet.of(StringUtils.split(impersonators, ';')); + Set impersonatorSet = ImmutableSet.copyOf(StringUtils.split(impersonators, ';')); for (Principal p : impersSubject.getPrincipals()) { if (ADMIN_USER.equals(p.getName()) || SYSTEM_USER.equals(p.getName()) @@ -85,4 +96,47 @@ public boolean allowImpersonate(Subject impersSubject) { return false; } + /** + * @return returns true if login is enabled for this user. + * @since 1.4 + */ + public boolean isLoginEnabled() { + return EnabledPeriod.isInEnabledPeriod((String) getProperty(LOGIN_ENABLED_PERIOD_FIELD)); + } + + /** + * Sets the login enabled time + * + * @param from + * UTC ms time after which user login is enabled. < 0 means no + * start time. + * @param to + * UTC ms time before which the user login is enabled, < 0 means + * no end time. + * @param day + * true if the time represents a day rather than a time + * @param timeZone + * the timezone which both these times should be interpreted in + * (relevant for a day setting). + * @since 1.4 + */ + public void setLoginEnabled(long from, long to, boolean day, TimeZone timeZone) { + String enabledSetting = EnabledPeriod.getEnableValue(from, to, day, timeZone); + if (enabledSetting == null) { + removeProperty(LOGIN_ENABLED_PERIOD_FIELD); + } else { + setProperty(LOGIN_ENABLED_PERIOD_FIELD, enabledSetting); + } + } + + /** + * @return an array length 2 of the times when the user is enabled in order + * from to. 
null indicates no time specified for either from or to + * times. This user will be allowed to login between those times. + * @since 1.4 + */ + public Calendar[] getLoginEnabledPeriod() { + return EnabledPeriod.getEnabledPeriod((String) getProperty(LOGIN_ENABLED_PERIOD_FIELD)); + } + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/content/ActionRecord.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/content/ActionRecord.java new file mode 100644 index 00000000..f0d992c6 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/content/ActionRecord.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.api.lite.content; + +/** + * Supplemental object for tracking transactions where sub-nodes are included + * + * @param from + * - Original path of object + * @param to + * - Final path of object (can be null if transaction is delete) + */ +public class ActionRecord { + private String from; + private String to; + + public ActionRecord(String newFrom, String newTo) { + from = newFrom; + to = newTo; + } + + public String getFrom() { + return from; + } + + public String getTo() { + return to; + } +} \ No newline at end of file diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/content/Content.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/content/Content.java similarity index 71% rename from src/main/java/org/sakaiproject/nakamura/api/lite/content/Content.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/content/Content.java index e5f9b095..8081d784 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/content/Content.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/content/Content.java @@ -32,35 +32,46 @@ * already exist with a cotentManager.get(path), and if that responds with a * null object, then create a new Content object with new Content(path, map); * where path is the path of the content object and map is null or the initial - * properties of the content object. At that poin the conten object is created + * properties of the content object. At that point the content object is created * but not saved. To save perform contentManager.udpate(contentObject) which - * will create any itermediate path and save the content object. At that point + * will create any intermediate path and save the content object. At that point * it will be persisted in the content store and have structure objects. *

*

+ * Please note, if you create a Content object using the public constructor, + * that object will have no children until it is saved and re-loaded by the + * ContentManager. Any attempt to list children of the newly created Content + * instance will result in an empty iterator. + *

+ *

* If you need to make changes to a Content object, get it out of the store, * with contentManager.get(path); then change some properties before performing * a contentManager.update(contentObject); Transactions are managed by the * underlying store implementation and are not actively managed in the - * cotnentManager. If your underlying store is not transactional, the update + * contentManager. If your underlying store is not transactional, the update * operation will persist directly to the underlying store. Concurrent threads * in the same JVM may retrieve the same underlying data from the content store - * but each cotnentManager will operate on its own set of contentObjects - * isolated from other cotnentManagers until the update operation is completed. + * but each contentManager will operate on its own set of contentObjects + * isolated from other contentManagers until the update operation is completed. *

*/ public class Content extends InternalContent { /** * Create a brand new content object not connected to the underlying store. - * To save use contentManager.update(contentObject); + * To save use contentManager.update(contentObject); Since the object is not + * connected to the underlying store, it not have any children. Only Content + * objects loaded from the underlying store with ContentManager.get(path) + * are connected to the underlying store and have children. This is the case + * even if the path of the Content instance created via the public + * constructor exists within the underlying content store. * * @param path * the path in the store that should not already exist. If it * does exist, this new object will overwrite. * @param content * a map of initial content metadata. - * @param + * @param */ public Content(String path, java.util.Map content) { super(path, content); diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/content/ContentManager.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/content/ContentManager.java similarity index 56% rename from src/main/java/org/sakaiproject/nakamura/api/lite/content/ContentManager.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/content/ContentManager.java index 92203a58..f136ba3b 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/content/ContentManager.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/content/ContentManager.java @@ -19,10 +19,13 @@ import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalTokenResolver; import java.io.IOException; import java.io.InputStream; +import java.util.Iterator; import java.util.List; +import java.util.Map; /** * Defines a ContentManager service for operating on content. 
@@ -45,6 +48,25 @@ public interface ContentManager { */ Content get(String path) throws StorageClientException, AccessDeniedException; + /** + * Perform a search for content matching the given properties + * + * @param searchProperties a Map of property names and values. All the properties must match to give a result + * @return an Iterable of Content items in no guaranteed order + * @throws StorageClientException + * @throws AccessDeniedException + */ + // TODO needs better documentation - not clear how to OR or AND + Iterable find(Map searchProperties) throws StorageClientException, AccessDeniedException; + + /** + * Counts the maximum number of results a find operation could return, ignoring access control. This method may cause problems + * if used inappropriately on sets of results that are mostly not readable by the current user (eg how many documents are there with "ieb" and "your fired" in ?) + * @param searchProperties Map the same as the finder + * @return maximum number of results a find could return. + */ + int count(Map countSearch) throws StorageClientException; + /** * Save the current version of the content object including metadata and * file bodies as a read only snapshot @@ -61,23 +83,44 @@ public interface ContentManager { * at that location and possibly at parent locations. */ String saveVersion(String path) throws StorageClientException, AccessDeniedException; + + String saveVersion(String path, Map versionMetadata) throws StorageClientException, AccessDeniedException; /** - * Update or create the content object, and intermediate path if necessary, - * stored at the location indicated by the path of the content object - * supplied. - * - * @param content - * the content object to update. - * @throws StorageClientException - * if there was a problem with the operation. - * @throws AccessDeniedException - * if the user is unable to write the object at the path. 
This - * is not an indication that the objected at the path exists, - * just that the user can't write anything at that location and - * possibly at parent locations. - */ - void update(Content content) throws AccessDeniedException, StorageClientException; + * Update or create the content object, and intermediate path if necessary, + * stored at the location indicated by the path of the content object + * supplied. + * + * @param content + * the content object to update. + * @throws StorageClientException + * if there was a problem with the operation. + * @throws AccessDeniedException + * if the user is unable to write the object at the path. This + * is not an indication that the objected at the path exists, + * just that the user can't write anything at that location and + * possibly at parent locations. + */ + void update(Content content) throws AccessDeniedException, StorageClientException; + + /** + * Update or create the content object, and intermediate path if necessary, + * stored at the location indicated by the path of the content object + * supplied. + * + * @param content + * the content object to update. + * @param withTouch + * if false, the modification timestamp will not be updated. Only admin can use this option. + * @throws StorageClientException + * if there was a problem with the operation. + * @throws AccessDeniedException + * if the user is unable to write the object at the path. This + * is not an indication that the objected at the path exists, + * just that the user can't write anything at that location and + * possibly at parent locations. + */ + void update(Content content, boolean withTouch) throws AccessDeniedException, StorageClientException; /** * Delete the content object at the path indicated. @@ -93,6 +136,7 @@ public interface ContentManager { * possibly at parent locations. 
*/ void delete(String path) throws AccessDeniedException, StorageClientException; + void delete(String path, boolean recurse) throws AccessDeniedException, StorageClientException; /** * Write a body stream associated with the content item at the specified @@ -197,7 +241,7 @@ InputStream getInputStream(String path, String streamId) throws StorageClientExc * the path to copy from, must exist * @param to * the path to copy to, must not exist - * @param deep + * @param withStreams * if true, a copy is made of all the streams, if false the * streams are shared but copies are made of the properties. * @throws IOException @@ -205,12 +249,12 @@ InputStream getInputStream(String path, String streamId) throws StorageClientExc * if the user cant read the source or write the desination. * @throws IOException */ - void copy(String from, String to, boolean deep) throws StorageClientException, + void copy(String from, String to, boolean withStreams) throws StorageClientException, AccessDeniedException, IOException; /** - * Move a content item from to. - * + * Move a content item from to. Equivalent to calling move(from, to, false, true) + * * @param from * the source, must exist * @param to @@ -218,22 +262,56 @@ void copy(String from, String to, boolean deep) throws StorageClientException, * @throws StorageClientException * @throws AccessDeniedException */ - void move(String from, String to) throws AccessDeniedException, StorageClientException; + List move(String from, String to) throws AccessDeniedException, StorageClientException; - /** - * Create a Link. Links place a pointer to real content located at the to - * path, in the from path. Modifications to the underlying content are - * reflected in both locations. Permissions are controlled by the location - * and not the underlying content. - * - * @param from - * the source of the link (the soft part), must not exist. 
- * @param to - * the destination, must exist - * @throws AccessDeniedException - * if the user cant read the to and write the from - * @throws StorageClientException - */ + /** + * Move a content item from to. Equivalent to calling move(from, to, force, true) + * + * @param from + * the source, must exist + * @param to + * the destination must not exist. + * @param force + * Whether to forcefully move to the destination (i.e. overwrite) + * @throws StorageClientException + * @throws AccessDeniedException + */ + List move(String from, String to, boolean force) throws AccessDeniedException, StorageClientException; + + /** + * Move a content item from to. + * + * @param from + * the source, must exist + * @param to + * the destination must not exist. + * @param force + * Whether to forcefully move to the destination (i.e. overwrite) + * @param keepHistory + * Whether to keep the history of the destination. If + * true, append from as the latest content at + * to. If false, delete to before + * copying from. + * @throws StorageClientException + * @throws AccessDeniedException + */ + List move(String from, String to, boolean force, + boolean keepDestinationHistory) throws AccessDeniedException, + StorageClientException; + + /** + * Create a Link. Links place a pointer to real content located at the to path, in the + * from path. Modifications to the underlying content are reflected in both locations. + * Permissions are controlled by the location and not the underlying content. + * + * @param from + * the source of the link (the soft part), must not exist. 
+ * @param to + * the destination, must exist + * @throws AccessDeniedException + * if the user cant read the to and write the from + * @throws StorageClientException + */ void link(String from, String to) throws AccessDeniedException, StorageClientException; /** @@ -290,4 +368,86 @@ InputStream getVersionInputStream(String path, String versionId) throws AccessDe List getVersionHistory(String path) throws AccessDeniedException, StorageClientException; + /** + * Gets a lazy iterator of child paths. + * @param path the parent path. + * @return + * @throws StorageClientException + */ + Iterator listChildPaths(String path) throws StorageClientException; + + /** + * Get a lazy iterator of child content objects. + * @param path + * @return + * @throws StorageClientException + */ + Iterator listChildren(String path) throws StorageClientException; + + /** + * @param path the path of the content node + * @param streamId the stream id, null for the default stream + * @return true if the stream id is present. + * @throws AccessDeniedException + * @throws StorageClientException + */ + boolean hasBody(String path, String streamId) throws StorageClientException, AccessDeniedException; + + /** + * Sets the principal Token Resolver for all subsequent requests using this + * session. When the ContentManager is invoked it will consult the supplied + * principal Token Resolver to locate any extra tokens that have been + * granted. + * + * @param principalTokenResolver + */ + void setPrincipalTokenResolver(PrincipalTokenResolver principalTokenResolver); + + /** + * Clear the principal Token Resolver + */ + void cleanPrincipalTokenResolver(); + + + /** + * @param path cause an event to be emitted for the path that will cause a refresh. + * @throws AccessDeniedException + * @throws StorageClientException + */ + void triggerRefresh(String path) throws StorageClientException, AccessDeniedException; + + + /** + * Cause an event to be emitted for all items. 
+ * @throws StorageClientException + */ + void triggerRefreshAll() throws StorageClientException; + + /** + * Replace the content at content.getPath() with content. This + * sets any properties in content to new RemoveProperty() if + * there exists a property in the current version at content.getPath() that + * is missing from content. + * + * @param content + * The content to replace at content.getPath(). + * @throws AccessDeniedException + * @throws StorageClientException + */ + void replace(Content content) throws AccessDeniedException, StorageClientException; + + /** + * Replace the content at content.getPath() with content. This + * sets any properties in content to new RemoveProperty() if + * there exists a property in the current version at content.getPath() that + * is missing from content. + * + * @param content + * The content to replace at content.getPath(). + * @param withTouch + * Whether to the update timestamp of the content. + * @throws AccessDeniedException + * @throws StorageClientException + */ + void replace(Content content, boolean withTouch) throws AccessDeniedException, StorageClientException; } diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/AlreadyLockedException.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/AlreadyLockedException.java new file mode 100644 index 00000000..a0bba217 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/AlreadyLockedException.java @@ -0,0 +1,14 @@ +package org.sakaiproject.nakamura.api.lite.lock; + +public class AlreadyLockedException extends Exception { + + /** + * + */ + private static final long serialVersionUID = -6198174336492911030L; + + public AlreadyLockedException(String path) { + super("Lock path: "+path); + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/LockManager.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/LockManager.java new file mode 100644 index 00000000..132d7e29 --- 
/dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/LockManager.java @@ -0,0 +1,78 @@ +package org.sakaiproject.nakamura.api.lite.lock; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; + +/** + * A simple hierarchical lock manager with tokens to identify locks. + * Implementations of the interface should not be bound to the content system. + * Locks live in their own hierarchy, and may exist even if there is not object + * present at that location in any other hierarchy. + * + * @author ieb + * + */ +public interface LockManager { + + /** + * Locks a path returning a token for the lock if successful, null if not + * + * @param path + * the path to lock + * @param timeoutInSeconds + * ttl for the lock in s from the time it was created. + * @param extra + * any extra information to be stored with the lock. + * @return the lock token. + * @throws StorageClientException + * @throws AlreadyLockedException + */ + String lock(String path, long timeoutInSeconds, String extra) throws StorageClientException, + AlreadyLockedException; + + /** + * Unlock a path for a given token, if the token and current user match. + * + * @param path + * the path + * @param token + * the token. + * @throws StorageClientException + */ + void unlock(String path, String token) throws StorageClientException; + + /** + * Get the lock state for a path given a token + * + * @param path + * the path + * @param token + * the token + * @return a lock state object which indicates if the token is current and + * bound to the current user. Lock state also indicates the location + * of the current lock. + * @throws StorageClientException + */ + LockState getLockState(String path, String token) throws StorageClientException; + + /** + * Check the it path is locked. + * + * @param path + * the path. + * @return true if the path is locked. 
+ * @throws StorageClientException + */ + boolean isLocked(String path) throws StorageClientException; + + /** + * Refresh the lock keeping the same token. + * @param path + * @param timeoutInSeconds + * @param string + * @param token + * @return the token, which should be the same. + * @throws StorageClientException + */ + String refreshLock(String path, long timeoutInSeconds, String extra, String token) throws StorageClientException; + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/LockState.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/LockState.java new file mode 100644 index 00000000..0874c9f2 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/lock/LockState.java @@ -0,0 +1,79 @@ +package org.sakaiproject.nakamura.api.lite.lock; + + +public class LockState { + + private static final LockState NOT_LOCKED = new LockState(null, false, null, false, false, + null, null); + private final boolean isOwner; + private final String owner; + private final String path; + private final boolean locked; + private String token; + private String extra; + private boolean matchedToken; + + public LockState(String path, boolean isOwner, String owner, boolean locked, + boolean matchedToken, String token, String extra) { + this.path = path; + this.isOwner = isOwner; + this.owner = owner; + this.locked = locked; + this.matchedToken = matchedToken; + this.token = token; + this.extra = extra; + } + + public static LockState getOwnerLockedToken(String path, String owner, String token, + String extra) { + return new LockState(path, true, owner, true, true, token, extra); + } + + public static LockState getOwnerLockedNoToken(String path, String owner, String token, + String extra) { + return new LockState(path, true, owner, true, false, token, extra); + } + + public static LockState getUserLocked(String path, String owner, String token, String extra) { + return new LockState(path, false, owner, true, false, token, 
extra); + } + + public static LockState getNotLocked() { + return NOT_LOCKED; + } + + public boolean isOwner() { + return isOwner; + } + + public String getLockPath() { + return path; + } + + public boolean isLocked() { + return locked; + } + + public String getToken() { + return token; + } + + public boolean hasMatchedToken() { + return matchedToken; + } + + public String getExtra() { + return extra; + } + + public String getOwner() { + return owner; + } + + @Override + public String toString() { + return " isOwner:" + isOwner + " owner:" + owner + " locked:" + locked + " matchedToken:" + + matchedToken + " token:" + token + " extra:[" + extra + "]"; + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/EnabledPeriod.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/EnabledPeriod.java new file mode 100644 index 00000000..90915c5f --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/EnabledPeriod.java @@ -0,0 +1,71 @@ +package org.sakaiproject.nakamura.api.lite.util; + +import java.util.Calendar; +import java.util.TimeZone; + +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class EnabledPeriod { + + private static final Logger LOGGER = LoggerFactory.getLogger(EnabledPeriod.class); + + public static boolean isInEnabledPeriod(String enabledPeriod) { + Calendar[] period = getEnabledPeriod(enabledPeriod); + Calendar now = new ISO8601Date(); + now.setTimeInMillis(System.currentTimeMillis()); + if (period[0] != null && period[0].compareTo(now) > 0) { + return false; + } + if (period[1] != null && period[1].compareTo(now) <= 0) { + return false; + } + return true; + } + + public static Calendar[] getEnabledPeriod(String enabledPeriod) { + try { + if (enabledPeriod != null) { + enabledPeriod = enabledPeriod.trim(); + if (enabledPeriod.startsWith(",")) { + return new Calendar[] { null, new ISO8601Date(enabledPeriod.substring(1)) }; + } else 
if (enabledPeriod.endsWith(",")) { + return new Calendar[] { + new ISO8601Date(enabledPeriod.substring(0, enabledPeriod.length() - 1)), + null }; + } else { + String[] period = StringUtils.split(enabledPeriod, ","); + return new Calendar[] { new ISO8601Date(period[0]), new ISO8601Date(period[1]) }; + } + } + } catch (IllegalArgumentException e) { + LOGGER.debug("Invalid date specified ", e); + } + return new Calendar[] { null, null }; + } + + public static String getEnableValue(long from, long to, boolean day, TimeZone zone) { + StringBuilder sb = new StringBuilder(); + if (from > 0) { + ISO8601Date before = new ISO8601Date(); + before.setTimeInMillis(from); + before.setTimeZone(zone); + before.setDate(day); + sb.append(before.toString()); + } + sb.append(","); + if (to > 0) { + ISO8601Date after = new ISO8601Date(); + after.setTimeInMillis(to); + after.setTimeZone(zone); + after.setDate(day); + sb.append(after.toString()); + } + if (sb.length() > 1) { + return sb.toString(); + } + return null; + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/ISO8601Date.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/ISO8601Date.java new file mode 100644 index 00000000..92fe6ea1 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/ISO8601Date.java @@ -0,0 +1,200 @@ +package org.sakaiproject.nakamura.api.lite.util; + +import java.util.Calendar; +import java.util.Formatter; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * + */ +public class ISO8601Date extends GregorianCalendar { + + /** + * + */ + private static final long serialVersionUID = 5115079662422026445L; + private boolean date; + + /* + * 2010-03-17 Separate date and time in UTC: 2010-03-17 06:33Z Combined date + * and time in UTC: 2010-03-17T06:33Z + */ + /** + * + */ + public ISO8601Date() { + date = false; + } + + public ISO8601Date(String spec) { + int l = spec.length(); + int year = -1; + int month = -1; + int day = 
-1; + int hour = -1; + int min = -1; + int sec = -1; + TimeZone z = null; + date = false; + switch (l) { + case 16:// 19970714T170000Z + case 18:// 19970714T170000+01 + year = Integer.parseInt(spec.substring(0, 4)); + month = Integer.parseInt(spec.substring(4, 6)); + day = Integer.parseInt(spec.substring(6, 8)); + hour = Integer.parseInt(spec.substring(9, 11)); + min = Integer.parseInt(spec.substring(11, 13)); + sec = Integer.parseInt(spec.substring(13, 15)); + if ('Z' == spec.charAt(l - 1)) { + z = TimeZone.getTimeZone("GMT"); + } else { + z = TimeZone.getTimeZone("GMT" + spec.substring(15)); + } + break; + case 20: // 1997-07-14T17:00:00Z // 19970714T170000+0100 + if ('Z' == spec.charAt(l - 1)) { + year = Integer.parseInt(spec.substring(0, 4)); + month = Integer.parseInt(spec.substring(5, 7)); + day = Integer.parseInt(spec.substring(8, 10)); + hour = Integer.parseInt(spec.substring(11, 13)); + min = Integer.parseInt(spec.substring(14, 16)); + sec = Integer.parseInt(spec.substring(17, 19)); + z = TimeZone.getTimeZone("UTC"); + } else { + year = Integer.parseInt(spec.substring(0, 4)); + month = Integer.parseInt(spec.substring(4, 6)); + day = Integer.parseInt(spec.substring(6, 8)); + hour = Integer.parseInt(spec.substring(9, 11)); + min = Integer.parseInt(spec.substring(11, 13)); + sec = Integer.parseInt(spec.substring(13, 15)); + z = TimeZone.getTimeZone("GMT" + spec.substring(15)); + } + break; + case 22: // 1997-07-14T17:00:00+01 + case 25: // 1997-07-14T17:00:00+01:00 + year = Integer.parseInt(spec.substring(0, 4)); + month = Integer.parseInt(spec.substring(5, 7)); + day = Integer.parseInt(spec.substring(8, 10)); + hour = Integer.parseInt(spec.substring(11, 13)); + min = Integer.parseInt(spec.substring(14, 16)); + sec = Integer.parseInt(spec.substring(17, 19)); + z = TimeZone.getTimeZone("GMT" + spec.substring(19)); + date = false; + break; + case 8: // 19970714 + year = Integer.parseInt(spec.substring(0, 4)); + month = Integer.parseInt(spec.substring(4, 6)); + 
day = Integer.parseInt(spec.substring(6, 8)); + hour = 0; + min = 0; + sec = 0; + z = TimeZone.getDefault(); // we really need to know the timezone of + // the user for + // this. + date = true; + break; + case 10: // 1997-07-14 + year = Integer.parseInt(spec.substring(0, 4)); + month = Integer.parseInt(spec.substring(5, 7)); + day = Integer.parseInt(spec.substring(8, 10)); + hour = 0; + min = 0; + sec = 0; + z = TimeZone.getDefault(); // we really need to know the timezone of + // the user for + // this. + date = true; + break; + default: + throw new IllegalArgumentException("Illeagal ISO8601 Date Time " + spec); + } + if (z == null) { + throw new IllegalArgumentException( + "Time Zone incorrectly formatted, must be one of Z or +00:00 or +0000. Time was " + + spec); + } + setTimeZone(z); + set(MILLISECOND, 0); + set(year, month - 1, day, hour, min, sec); + } + + @Override + public int compareTo(Calendar anotherCalendar) { + if ( date ) { + int cmp = get(YEAR) - anotherCalendar.get(YEAR); + if ( cmp == 0 ) { + cmp = get(DAY_OF_YEAR) - anotherCalendar.get(DAY_OF_YEAR); + } + return cmp; + } + return super.compareTo(anotherCalendar); + } + + @Override + public int hashCode() { + return super.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if ( obj instanceof ISO8601Date ) { + ISO8601Date d = (ISO8601Date) obj; + if ( date && d.date ) { + return get(YEAR) == d.get(YEAR) && get(DAY_OF_YEAR) == d.get(DAY_OF_YEAR); + } else if (date != d.date ) { + return false; + } else { + return super.equals(obj); + } + } + if ( date ) { + return false; + } + return super.equals(obj); + } + + @Override + public String toString() { + Formatter formatter = new Formatter(); + int year = get(YEAR); + int month = get(MONTH) + 1; + int day = get(DAY_OF_MONTH); + int hour = get(HOUR_OF_DAY); + int min = get(MINUTE); + int second = get(SECOND); + if (date) { + formatter.format("%04d-%02d-%02d", year, month, day); + } else { + // this prints out the offset, not the time 
zone name, but it takes + // into account DST if in effect for the time in question. Not that + // is + // not changed by the time of printing. This was checked on 23/11/2011. + long offset = getTimeZone().getOffset(getTimeInMillis()) / (60000L); + int hoffset = (int) (offset / 60L); + int minoffset = (int) (offset % 60L); + if (offset == 0) { + formatter.format("%04d-%02d-%02dT%02d:%02d:%02dZ", year, month, day, hour, min, + second); + } else if (offset < 0) { + formatter.format("%04d-%02d-%02dT%02d:%02d:%02d-%02d:%02d", year, month, day, hour, + min, second, -hoffset, -minoffset); + } else { + formatter.format("%04d-%02d-%02dT%02d:%02d:%02d+%02d:%02d", year, month, day, hour, + min, second, hoffset, minoffset); + } + } + return formatter.toString(); + } + + /** + * @param b + */ + public void setDate(boolean b) { + date = b; + } + + public boolean isDate() { + return date; + } +} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/util/Iterables.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/Iterables.java similarity index 100% rename from src/main/java/org/sakaiproject/nakamura/api/lite/util/Iterables.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/util/Iterables.java diff --git a/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/PreemptiveIterator.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/PreemptiveIterator.java new file mode 100644 index 00000000..dd716676 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/PreemptiveIterator.java @@ -0,0 +1,88 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.lite.util; + +import org.sakaiproject.nakamura.lite.storage.spi.CachableDisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.Disposer; + +import java.util.Iterator; +import java.util.Map; +import java.util.NoSuchElementException; + +/** + * A Iterator wrapper that pre-emptively checks the next value in the underlying iterator before responding true to hasNext(). + * @param + */ +public abstract class PreemptiveIterator implements Iterator, CachableDisposableIterator { + + private static final int UNDETERMINED = 0; + private static final int TRUE = 1; + private static final int FALSE = -1; + private int lastCheck = UNDETERMINED; + private Disposer disposer; + + protected abstract boolean internalHasNext(); + + protected abstract T internalNext(); + + /** + * By default a preemptive iterator does not cache. Override this method to make it cache. 
+ */ + @Override + public Map getResultsMap() { + return null; + } + + public final boolean hasNext() { + if (lastCheck == FALSE) { + return false; + } + if (lastCheck != UNDETERMINED) { + return (lastCheck == TRUE); + } + if (internalHasNext()) { + lastCheck = TRUE; + return true; + } + lastCheck = FALSE; + return false; + } + + public final T next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + lastCheck = UNDETERMINED; + return internalNext(); + } + + public final void remove() { + throw new UnsupportedOperationException(); + } + + public void close() { + if ( disposer != null ) { + disposer.unregisterDisposable(this); + } + } + + public void setDisposer(Disposer disposer) { + this.disposer = disposer; + } + +} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/util/Type1UUID.java b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/Type1UUID.java similarity index 94% rename from src/main/java/org/sakaiproject/nakamura/api/lite/util/Type1UUID.java rename to core/src/main/java/org/sakaiproject/nakamura/api/lite/util/Type1UUID.java index 07008c0f..0476dfe5 100644 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/util/Type1UUID.java +++ b/core/src/main/java/org/sakaiproject/nakamura/api/lite/util/Type1UUID.java @@ -1,20 +1,19 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file + * regarding copyright ownership. The SF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * with the License. 
You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package org.sakaiproject.nakamura.api.lite.util; diff --git a/src/main/java/org/sakaiproject/nakamura/lite/BaseMemoryRepository.java b/core/src/main/java/org/sakaiproject/nakamura/lite/BaseMemoryRepository.java similarity index 61% rename from src/main/java/org/sakaiproject/nakamura/lite/BaseMemoryRepository.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/BaseMemoryRepository.java index 118a20c8..4e25a65a 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/BaseMemoryRepository.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/BaseMemoryRepository.java @@ -1,23 +1,42 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations under the License. + */ package org.sakaiproject.nakamura.lite; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; import java.util.Map; /** - * Utiltiy class to create an entirely in memorty Sparse Repository, usefull for + * Utility class to create an entirely in memory Sparse Repository, useful for * testing or bulk internal modifications. 
*/ public class BaseMemoryRepository { @@ -29,9 +48,7 @@ public class BaseMemoryRepository { private RepositoryImpl repository; public BaseMemoryRepository() throws StorageClientException, AccessDeniedException, - ClientPoolException, ClassNotFoundException { - clientPool = getClientPool(); - client = clientPool.getClient(); + ClientPoolException, ClassNotFoundException, IOException { configuration = new ConfigurationImpl(); Map properties = Maps.newHashMap(); properties.put("keyspace", "n"); @@ -39,6 +56,8 @@ public BaseMemoryRepository() throws StorageClientException, AccessDeniedExcepti properties.put("authorizable-column-family", "au"); properties.put("content-column-family", "cn"); configuration.activate(properties); + clientPool = getClientPool(configuration); + client = clientPool.getClient(); AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, configuration); authorizableActivator.setup(); @@ -56,10 +75,11 @@ public void close() { client.close(); } - protected StorageClientPool getClientPool() throws ClassNotFoundException { + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { MemoryStorageClientPool cp = new MemoryStorageClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/CachingManagerImpl.java b/core/src/main/java/org/sakaiproject/nakamura/lite/CachingManagerImpl.java new file mode 100644 index 00000000..3476a00e --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/CachingManagerImpl.java @@ -0,0 +1,233 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite; + +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.sakaiproject.nakamura.lite.storage.spi.Disposable; +import org.sakaiproject.nakamura.lite.storage.spi.Disposer; +import org.sakaiproject.nakamura.lite.storage.spi.RowHasher; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.security.SecureRandom; +import java.util.Map; + +/** + * Extend this class to add caching to a Manager class. 
+ */ +public abstract class CachingManagerImpl implements DirectCacheAccess { + + private static final Logger LOGGER = LoggerFactory.getLogger(CachingManagerImpl.class); + private Map sharedCache; + private StorageClient client; + private long managerId; + private static SecureRandom secureRandom = new SecureRandom(); // need to assume that the secure random will be reasonably quick to start up + + /** + * Create a new {@link CachingManagerImpl} + * @param client a client to the underlying storage engine + * @param sharedCache the cache where the objects will be stored + */ + public CachingManagerImpl(StorageClient client, Map sharedCache) { + this.client = client; + this.sharedCache = sharedCache; + managerId = getManagerId(); + } + + private long getManagerId() { + // needs to have a low probability of clashing with any other Cache manager in the cluster. + // no idea what the probability of a clash is here, although I assume its lowish. + return secureRandom.nextLong(); + } + + /** + * Try to retrieve an object from the cache. + * Has the side-effect of loading an uncached object into cache the first time. + * @param keySpace the key space we're operating in. 
+ * @param columnFamily the column family for the object + * @param key the object key + * @return the object or null if not cached and not found + * @throws StorageClientException + */ + protected Map getCached(String keySpace, String columnFamily, String key) + throws StorageClientException { + Map m = null; + String cacheKey = getCacheKey(keySpace, columnFamily, key); + + CacheHolder cacheHolder = getFromCacheInternal(cacheKey); + if (cacheHolder != null ) { + m = cacheHolder.get(); + if ( m != null ) { + LOGGER.debug("Cache Hit {} {} {} ", new Object[] { cacheKey, cacheHolder, m }); + } + } + if (m == null) { + m = client.get(keySpace, columnFamily, key); + if (m != null) { + LOGGER.debug("Cache Miss, Found Map {} {}", cacheKey, m); + } + putToCacheInternal(cacheKey, new CacheHolder(m), true); + } + return m; + } + public void putToCache(String cacheKey, CacheHolder cacheHolder) { + putToCache(cacheKey, cacheHolder, false); + } + + public void putToCache(String cacheKey, CacheHolder cacheHolder, boolean respectDeletes) { + if ( client instanceof RowHasher ) { + putToCacheInternal(cacheKey, cacheHolder, respectDeletes); + } + } + + private void putToCacheInternal(String cacheKey, CacheHolder cacheHolder, boolean respectDeletes) { + if (sharedCache != null) { + if ( respectDeletes ) { + CacheHolder ch = sharedCache.get(cacheKey); + if ( ch != null && ch.get() == null ) { + // item is deleted, dont update it + return; + } + } + sharedCache.put(cacheKey, cacheHolder); + } + } + public CacheHolder getFromCache(String cacheKey) { + if ( client instanceof RowHasher ) { + return getFromCacheInternal(cacheKey); + } + return null; + } + private CacheHolder getFromCacheInternal(String cacheKey) { + if (sharedCache != null && sharedCache.containsKey(cacheKey)) { + return sharedCache.get(cacheKey); + } + return null; + } + + protected abstract Logger getLogger(); + + /** + * Combine the parameters into a key suitable for storage and lookup in the cache. 
+ * @param keySpace + * @param columnFamily + * @param key + * @return the cache key + * @throws StorageClientException + */ + private String getCacheKey(String keySpace, String columnFamily, String key) throws StorageClientException { + if ( client instanceof RowHasher) { + return ((RowHasher) client).rowHash(keySpace, columnFamily, key); + } + return keySpace + ":" + columnFamily + ":" + key; + } + + /** + * Remove this object from the cache. Note, StorageClient uses the word + * remove to mean delete. This method should do the same. + * + * @param keySpace + * @param columnFamily + * @param key + * @throws StorageClientException + */ + protected void removeCached(String keySpace, String columnFamily, String key) throws StorageClientException { + if (sharedCache != null) { + // insert a replacement. This should cause an invalidation message to propagate in the cluster. + final String cacheKey = getCacheKey(keySpace, columnFamily, key); + putToCacheInternal(cacheKey, new CacheHolder(null, managerId), false); + LOGGER.debug("Marked as deleted in Cache {} ", cacheKey); + if ( client instanceof Disposer ) { + // we might want to change this to register the action as a commit handler rather than a disposable. + // it depends on if we think the delete is a transactional thing or a operational cache thing. + // at the moment, I am leaning towards an operational cache thing, since regardless of if + // the session commits or not, we want this to dispose when the session is closed, or commits. 
+ ((Disposer)client).registerDisposable(new Disposable() { + + @Override + public void setDisposer(Disposer disposer) { + } + + @Override + public void close() { + CacheHolder ch = sharedCache.get(cacheKey); + if ( ch != null && ch.wasLockedTo(managerId)) { + sharedCache.remove(cacheKey); + LOGGER.debug("Removed deleted marker from Cache {} ", cacheKey); + } + } + }); + } + } + client.remove(keySpace, columnFamily, key); + + } + + /** + * Put an object in the cache + * @param keySpace + * @param columnFamily + * @param key + * @param encodedProperties the object to be stored + * @param probablyNew whether or not this object is new. + * @throws StorageClientException + */ + protected void putCached(String keySpace, String columnFamily, String key, + Map encodedProperties, boolean probablyNew) + throws StorageClientException { + String cacheKey = null; + if ( sharedCache != null ) { + cacheKey = getCacheKey(keySpace, columnFamily, key); + } + if ( sharedCache != null && !probablyNew ) { + CacheHolder ch = getFromCacheInternal(cacheKey); + if ( ch != null && ch.isLocked(this.managerId) ) { + LOGGER.debug("Is Locked {} ",ch); + return; // catch the case where another method creates while something is in the cache. + // this is a big assumption since if the item is not in the cache it will get updated + // there is no difference in sparsemap between create and update, they are all insert operations + // what we are really saying here is that inorder to update the item you have to have just got it + // and if you failed to get it, your update must have been a create operation. As long as the dwell time + // in the cache is longer than the lifetime of an active session then this will be true. 
+ // if the lifetime of an active session is longer (like with a long running background operation) + // then you should expect to see race conditions at this point since the marker in the cache will have + // gone, and the marker in the database has gone, so the put operation, must be a create operation. + // To change this behavior we would need to differentiate more strongly between new and update and change + // probablyNew into certainlyNew, but that would probably break the BASIC assumption of the whole system. + // Update 2011-12-06 related to issue 136 + // I am not certain this code is correct. What happens if the session wants to remove and then add items. + // the session will never get past this point, since sitting in the cache is a null CacheHolder preventing the session + // removing then adding. + // also, how long should the null cache holder be placed in there for ? + // I think the solution is to bind the null Cache holder to the instance of the caching manager that created it, + // let the null Cache holder last for 10s, and during that time only the CachingManagerImpl that created it can remove it. + } + } + LOGGER.debug("Saving {} {} {} {} ", new Object[] { keySpace, columnFamily, key, + encodedProperties }); + client.insert(keySpace, columnFamily, key, encodedProperties, probablyNew); + if ( sharedCache != null ) { + // if we just added a value in, remove the key so that any stale state (including a previously deleted object is removed) + sharedCache.remove(cacheKey); + } + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/ConfigurationImpl.java b/core/src/main/java/org/sakaiproject/nakamura/lite/ConfigurationImpl.java new file mode 100644 index 00000000..59ba4b7b --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/ConfigurationImpl.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite; + +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; +import java.util.Map; +import java.util.Properties; + +import org.apache.commons.lang.StringUtils; +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; + +@Component(immediate = true, metatype = true) +@Service(value = Configuration.class) +public class ConfigurationImpl implements Configuration { + + @Property(value = "ac") + protected static final String ACL_COLUMN_FAMILY = "acl-column-family"; + @Property(value = "n") + protected static final String KEYSPACE = "keyspace"; + @Property(value = "au") + protected static final String AUTHORIZABLE_COLUMN_FAMILY = "authorizable-column-family"; + @Property(value = "cn") + protected static final String CONTENT_COLUMN_FAMILY = 
"content-column-family"; + @Property(value = "lk") + protected static final String LOCK_COLUMN_FAMILY = "lock-column-family"; + + protected static final String DEFAULT_INDEX_COLUMN_NAMES = "au:rep:principalName,au:type,cn:sling:resourceType," + + "cn:sakai:pooled-content-manager,cn:sakai:messagestore,cn:sakai:type,cn:sakai:marker,cn:sakai:tag-uuid," + + "cn:sakai:contactstorepath,cn:sakai:state,cn:_created,cn:sakai:category,cn:sakai:messagebox,cn:sakai:from," + + "cn:sakai:subject"; + + @Property(value=DEFAULT_INDEX_COLUMN_NAMES) + protected static final String INDEX_COLUMN_NAMES = "index-column-names"; + + private static final String DEFAULT_INDEX_COLUMN_TYPES = "cn:sakai:pooled-content-manager=String[],cn:sakai:category=String[]"; + + @Property(value=DEFAULT_INDEX_COLUMN_TYPES) + protected static final String INDEX_COLUMN_TYPES = "index-column-types"; + + + private static final String SHAREDCONFIGPATH = "org/sakaiproject/nakamura/lite/shared.properties"; + + protected static final String SHAREDCONFIGPROPERTY = "sparseconfig"; + private static final Logger LOGGER = LoggerFactory.getLogger(ConfigurationImpl.class); + + + private String aclColumnFamily; + private String keySpace; + private String authorizableColumnFamily; + private String contentColumnFamily; + private String lockColumnFamily; + private String[] indexColumnNames; + private Map sharedProperties; + private String[] indexColumnTypes; + + @SuppressWarnings("unchecked") + @Activate + public void activate(Map properties) throws IOException { + aclColumnFamily = StorageClientUtils.getSetting(properties.get(ACL_COLUMN_FAMILY), "ac"); + keySpace = StorageClientUtils.getSetting(properties.get(KEYSPACE), "n"); + authorizableColumnFamily = StorageClientUtils.getSetting(properties.get(AUTHORIZABLE_COLUMN_FAMILY), "au"); + contentColumnFamily = StorageClientUtils.getSetting(properties.get(CONTENT_COLUMN_FAMILY), "cn"); + lockColumnFamily = StorageClientUtils.getSetting(properties.get(LOCK_COLUMN_FAMILY), "ln"); + 
+ // load defaults + // check the classpath + sharedProperties = Maps.newHashMap(); + InputStream in = this.getClass().getClassLoader().getResourceAsStream(SHAREDCONFIGPATH); + if ( in != null ) { + Properties p = new Properties(); + p.load(in); + in.close(); + sharedProperties.putAll(Maps.fromProperties(p)); + } + // Load from a properties file defiend on the command line + String osSharedConfigPath = System.getProperty(SHAREDCONFIGPROPERTY); + if ( osSharedConfigPath != null && StringUtils.isNotEmpty(osSharedConfigPath)) { + File f = new File(osSharedConfigPath); + if ( f.exists() && f.canRead() ) { + FileReader fr = new FileReader(f); + Properties p = new Properties(); + p.load(fr); + fr.close(); + sharedProperties.putAll(Maps.fromProperties(p)); + } else { + LOGGER.warn("Unable to read shared config file {} specified by the system property {} ",f.getAbsolutePath(), SHAREDCONFIGPROPERTY); + } + } + + // make the shared properties immutable. + sharedProperties = ImmutableMap.copyOf(sharedProperties); + indexColumnNames = StringUtils.split(getProperty(INDEX_COLUMN_NAMES,DEFAULT_INDEX_COLUMN_NAMES, sharedProperties, properties),','); + LOGGER.info("Using Configuration for Index Column Names as {}", Arrays.toString(indexColumnNames)); + indexColumnTypes = StringUtils.split(getProperty(INDEX_COLUMN_TYPES,DEFAULT_INDEX_COLUMN_TYPES, sharedProperties, properties),','); + + + + + } + + private String getProperty(String name, String defaultValue, + Map ...properties ) { + // if present in the shared properties, load the default from there. 
+ String value = defaultValue; + for ( Map p : properties ) { + if ( p.containsKey(name) ) { + Object v = p.get(name); + if ( v != null && !defaultValue.equals(v)) { + value = String.valueOf(v); + LOGGER.debug("{} is configured as {}", value); + } + } + } + return value; + + } + + public String getAclColumnFamily() { + return aclColumnFamily; + } + + public String getKeySpace() { + return keySpace; + } + + public String getAuthorizableColumnFamily() { + return authorizableColumnFamily; + } + + public String getContentColumnFamily() { + return contentColumnFamily; + } + + public String getLockColumnFamily() { + return lockColumnFamily; + } + + public String[] getIndexColumnNames() { + return indexColumnNames; + } + public Map getSharedConfig() { + return sharedProperties; + } + + public String[] getIndexColumnTypes() { + return indexColumnTypes; + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/LoggingStorageListener.java b/core/src/main/java/org/sakaiproject/nakamura/lite/LoggingStorageListener.java new file mode 100644 index 00000000..8f255eb3 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/LoggingStorageListener.java @@ -0,0 +1,68 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite; + +import org.sakaiproject.nakamura.api.lite.StoreListener; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; +import java.util.Map; + +public class LoggingStorageListener implements StoreListener { + + private static final Logger LOGGER = LoggerFactory.getLogger(LoggingStorageListener.class); + private boolean quiet; + + public LoggingStorageListener(boolean quiet) { + this.quiet = quiet; + } + + public LoggingStorageListener() { + this.quiet = false; + } + + public void onDelete(String zone, String path, String user, String resourceType, Map beforeEvent, + String... attributes) { + if (!quiet) { + LOGGER.info("Delete {} {} {} {} {} ", + new Object[] { zone, path, user, resourceType, Arrays.toString(attributes) }); + } + } + + public void onUpdate(String zone, String path, String user, String resourceType, boolean isNew, + Map beforeEvent, String... attributes) { + if (!quiet) { + LOGGER.info("Update {} {} {} {} new:{} {} ", new Object[] { zone, path, user, resourceType, isNew, + Arrays.toString(attributes) }); + } + } + + public void onLogin(String userId, String sessionId) { + if (!quiet) { + LOGGER.info("Login {} {}", new Object[] { userId, sessionId }); + } + } + + public void onLogout(String userId, String sessionId) { + if (!quiet) { + LOGGER.info("Logout {} {}", new Object[] { userId, sessionId }); + } + } + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/DisposableIterator.java b/core/src/main/java/org/sakaiproject/nakamura/lite/ManualOperationService.java similarity index 78% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/DisposableIterator.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/ManualOperationService.java index 8b7dbf22..fc615920 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/DisposableIterator.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/ManualOperationService.java @@ -1,4 
+1,4 @@ -/* +/** * Licensed to the Sakai Foundation (SF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -15,16 +15,15 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.storage; - -import java.util.Iterator; +package org.sakaiproject.nakamura.lite; /** - * Disposable Iterators must be closed when they have been used. + * A marker service for components that are disabled and perform an action on + * activation. (OSGi component validation requirement) * * @author ieb * - * @param */ -public interface DisposableIterator extends Iterator, Disposable { +public interface ManualOperationService { + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/NullCacheManagerX.java b/core/src/main/java/org/sakaiproject/nakamura/lite/NullCacheManagerX.java new file mode 100644 index 00000000..783a8c4d --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/NullCacheManagerX.java @@ -0,0 +1,40 @@ +package org.sakaiproject.nakamura.lite; + +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.StorageCacheManager; + +/** + * Unmanaged Caches are used where there is nothing else provided by the client. 
+ * @author ieb + * + */ +public class NullCacheManagerX implements StorageCacheManager { + + + + @Override + public Map getAccessControlCache() { + return null; + } + + @Override + public Map getAuthorizableCache() { + return null; + } + + @Override + public Map getContentCache() { + return null; + } + + @Override + public Map getCache(String cacheName) { + return null; + } + + + + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/OSGiStoreListener.java b/core/src/main/java/org/sakaiproject/nakamura/lite/OSGiStoreListener.java similarity index 73% rename from src/main/java/org/sakaiproject/nakamura/lite/OSGiStoreListener.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/OSGiStoreListener.java index 0636c41f..29807da6 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/OSGiStoreListener.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/OSGiStoreListener.java @@ -1,3 +1,20 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ package org.sakaiproject.nakamura.lite; import com.google.common.collect.ImmutableMap; @@ -18,6 +35,10 @@ import java.util.Hashtable; import java.util.Map; +/** + * When this {@link StoreListener} is notified of a storage action + * it posts an OSGi {@link Event} to the {@link EventAdmin} + */ @Component(immediate = true, metatype = true) @Service public class OSGiStoreListener implements StoreListener { @@ -60,15 +81,21 @@ public class OSGiStoreListener implements StoreListener { } - public void onDelete(String zone, String path, String user, String... attributes) { + /** + * {@inheritDoc} + */ + public void onDelete(String zone, String path, String user, String resourceType, Map beforeEvent, String ... attributes) { String topic = DEFAULT_DELETE_TOPIC; if (deleteTopics.containsKey(zone)) { topic = deleteTopics.get(zone); } - postEvent(topic, path, user, attributes); + postEvent(topic, path, user, resourceType, beforeEvent, attributes); } - public void onUpdate(String zone, String path, String user, boolean isNew, String... attributes) { + /** + * {@inheritDoc} + */ + public void onUpdate(String zone, String path, String user, String resourceType, boolean isNew, Map beforeEvent, String... attributes) { String topic = DEFAULT_UPDATE_TOPIC; if (isNew) { @@ -82,19 +109,29 @@ public void onUpdate(String zone, String path, String user, boolean isNew, Strin } } - postEvent(topic, path, user, attributes); + postEvent(topic, path, user, resourceType, beforeEvent, attributes); } + /** + * {@inheritDoc} + * + * No event is posted for these actions. + */ public void onLogin(String userid, String sessionID) { LOGGER.debug("Login {} {} ", userid, sessionID); } + /** + * {@inheritDoc} + * + * No event is posted for these actions. 
+ */ public void onLogout(String userid, String sessionID) { LOGGER.debug("Logout {} {} ", userid, sessionID); } - private void postEvent(String topic, String path, String user, String[] attributes) { - final Dictionary properties = new Hashtable(); + private void postEvent(String topic, String path, String user, String resourceType, Map beforeEvent, String[] attributes) { + final Dictionary properties = new Hashtable(); if (attributes != null) { for (String attribute : attributes) { String[] parts = StringUtils.split(attribute, ":", 2); @@ -110,7 +147,13 @@ private void postEvent(String topic, String path, String user, String[] attribut if (path != null) { properties.put(PATH_PROPERTY, path); } + if ( resourceType != null ) { + properties.put(RESOURCE_TYPE_PROPERTY, resourceType); + } properties.put(USERID_PROPERTY, user); + if ( beforeEvent != null) { + properties.put(BEFORE_EVENT_PROPERTY, beforeEvent); + } eventAdmin.postEvent(new Event(topic, properties)); } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/RepositoryImpl.java b/core/src/main/java/org/sakaiproject/nakamura/lite/RepositoryImpl.java similarity index 65% rename from src/main/java/org/sakaiproject/nakamura/lite/RepositoryImpl.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/RepositoryImpl.java index beef8a03..d62bc88b 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/RepositoryImpl.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/RepositoryImpl.java @@ -22,18 +22,23 @@ import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.CacheHolder; import org.sakaiproject.nakamura.api.lite.ClientPoolException; import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.Repository; import org.sakaiproject.nakamura.api.lite.Session; +import 
org.sakaiproject.nakamura.api.lite.StorageCacheManager; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StoreListener; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; import org.sakaiproject.nakamura.api.lite.authorizable.User; import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.Map; @@ -41,19 +46,30 @@ @Service(value = Repository.class) public class RepositoryImpl implements Repository { + private static final Logger LOGGER = LoggerFactory.getLogger(RepositoryImpl.class); + @Reference protected Configuration configuration; @Reference protected StorageClientPool clientPool; - - @Reference + + @Reference protected StoreListener storeListener; + @Reference + protected PrincipalValidatorResolver principalValidatorResolver; public RepositoryImpl() { } + public RepositoryImpl(Configuration configuration, StorageClientPool clientPool, + LoggingStorageListener listener) { + this.configuration = configuration; + this.clientPool = clientPool; + this.storeListener = listener; + } + @Activate public void activate(Map properties) throws ClientPoolException, StorageClientException, AccessDeniedException { @@ -64,8 +80,11 @@ public void activate(Map properties) throws ClientPoolException, configuration); authorizableActivator.setup(); } finally { - client.close(); - clientPool.getClient(); + if (client != null) { + client.close(); + } else { + LOGGER.error("Failed to actvate 
repository, probably failed to create default users"); + } } } @@ -93,17 +112,23 @@ public Session loginAdministrative(String username) throws StorageClientExceptio return openSession(username); } + public Session loginAdministrativeBypassEnable(String username) throws StorageClientException, + ClientPoolException, AccessDeniedException { + return openSessionBypassEnable(username); + } + private Session openSession(String username, String password) throws StorageClientException, AccessDeniedException { StorageClient client = null; try { client = clientPool.getClient(); - AuthenticatorImpl authenticatorImpl = new AuthenticatorImpl(client, configuration); + AuthenticatorImpl authenticatorImpl = new AuthenticatorImpl(client, configuration, getAuthorizableCache(clientPool.getStorageCacheManager())); User currentUser = authenticatorImpl.authenticate(username, password); if (currentUser == null) { throw new StorageClientException("User " + username + " cant login with password"); } - return new SessionImpl(this, currentUser, client, configuration, clientPool.getStorageCacheManager(), storeListener); + return new SessionImpl(this, currentUser, client, configuration, + clientPool.getStorageCacheManager(), storeListener, principalValidatorResolver); } catch (ClientPoolException e) { clientPool.getClient(); throw e; @@ -119,18 +144,54 @@ private Session openSession(String username, String password) throws StorageClie } } + private Map getAuthorizableCache(StorageCacheManager storageCacheManager) { + if ( storageCacheManager != null ) { + return storageCacheManager.getAuthorizableCache(); + } + return null; + } + private Session openSession(String username) throws StorageClientException, AccessDeniedException { StorageClient client = null; try { client = clientPool.getClient(); - AuthenticatorImpl authenticatorImpl = new AuthenticatorImpl(client, configuration); + AuthenticatorImpl authenticatorImpl = new AuthenticatorImpl(client, configuration, 
getAuthorizableCache(clientPool.getStorageCacheManager())); User currentUser = authenticatorImpl.systemAuthenticate(username); if (currentUser == null) { throw new StorageClientException("User " + username + " does not exist, cant login administratively as this user"); } - return new SessionImpl(this, currentUser, client, configuration, clientPool.getStorageCacheManager(), storeListener); + return new SessionImpl(this, currentUser, client, configuration, + clientPool.getStorageCacheManager(), storeListener, principalValidatorResolver); + } catch (ClientPoolException e) { + clientPool.getClient(); + throw e; + } catch (StorageClientException e) { + clientPool.getClient(); + throw e; + } catch (AccessDeniedException e) { + clientPool.getClient(); + throw e; + } catch (Throwable e) { + clientPool.getClient(); + throw new StorageClientException(e.getMessage(), e); + } + } + + private Session openSessionBypassEnable(String username) throws StorageClientException, + AccessDeniedException { + StorageClient client = null; + try { + client = clientPool.getClient(); + AuthenticatorImpl authenticatorImpl = new AuthenticatorImpl(client, configuration, getAuthorizableCache(clientPool.getStorageCacheManager())); + User currentUser = authenticatorImpl.systemAuthenticateBypassEnable(username); + if (currentUser == null) { + throw new StorageClientException("User " + username + + " does not exist, cant login administratively as this user"); + } + return new SessionImpl(this, currentUser, client, configuration, + clientPool.getStorageCacheManager(), storeListener, principalValidatorResolver); } catch (ClientPoolException e) { clientPool.getClient(); throw e; @@ -156,7 +217,7 @@ public void setConnectionPool(StorageClientPool connectionPool) { public void setStorageListener(StoreListener storeListener) { this.storeListener = storeListener; - + } } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/SessionImpl.java 
b/core/src/main/java/org/sakaiproject/nakamura/lite/SessionImpl.java similarity index 57% rename from src/main/java/org/sakaiproject/nakamura/lite/SessionImpl.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/SessionImpl.java index 127b1ca6..6edd2f16 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/SessionImpl.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/SessionImpl.java @@ -17,7 +17,11 @@ */ package org.sakaiproject.nakamura.lite; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.CacheHolder; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.CommitHandler; import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.Repository; import org.sakaiproject.nakamura.api.lite.Session; @@ -26,48 +30,80 @@ import org.sakaiproject.nakamura.api.lite.StoreListener; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; import org.sakaiproject.nakamura.api.lite.accesscontrol.Authenticator; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; import org.sakaiproject.nakamura.api.lite.authorizable.User; import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; import org.sakaiproject.nakamura.lite.authorizable.AuthorizableManagerImpl; import org.sakaiproject.nakamura.lite.content.ContentManagerImpl; -import org.sakaiproject.nakamura.lite.storage.StorageClient; +import org.sakaiproject.nakamura.lite.lock.LockManagerImpl; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.Maps; public class SessionImpl implements Session { + private static final Logger LOGGER = LoggerFactory.getLogger(SessionImpl.class); private AccessControlManagerImpl accessControlManager; private 
ContentManagerImpl contentManager; private AuthorizableManagerImpl authorizableManager; + private LockManagerImpl lockManager; private User currentUser; private Repository repository; private Exception closedAt; private StorageClient client; private Authenticator authenticator; private StoreListener storeListener; + private Map commitHandlers = Maps.newLinkedHashMap(); + private StorageCacheManager storageCacheManager; + private Configuration configuration; + private static long nagclient; public SessionImpl(Repository repository, User currentUser, StorageClient client, - Configuration configuration, StorageCacheManager storageCacheManager, StoreListener storeListener) + Configuration configuration, StorageCacheManager storageCacheManager, + StoreListener storeListener, PrincipalValidatorResolver principalValidatorResolver) throws ClientPoolException, StorageClientException, AccessDeniedException { this.currentUser = currentUser; this.repository = repository; this.client = client; + this.storageCacheManager = storageCacheManager; + this.storeListener = storeListener; + this.configuration = configuration; + + if ( this.storageCacheManager == null ) { + if ( (nagclient % 1000) == 0 ) { + LOGGER.warn("No Cache Manager, All Caching disabled, please provide an Implementation of NamedCacheManager. This message will appear every 1000th time a session is created. 
"); + } + nagclient++; + } accessControlManager = new AccessControlManagerImpl(client, currentUser, configuration, - storageCacheManager.getAccessControlCache(), storeListener); - authorizableManager = new AuthorizableManagerImpl(currentUser, client, configuration, - accessControlManager, storageCacheManager.getAuthorizableCache(), storeListener); + getCache(configuration.getAclColumnFamily()), storeListener, + principalValidatorResolver); + Map authorizableCache = getCache(configuration + .getAuthorizableColumnFamily()); + authorizableManager = new AuthorizableManagerImpl(currentUser, this, client, configuration, + accessControlManager, authorizableCache, storeListener); - contentManager = new ContentManagerImpl(client, accessControlManager, configuration, storageCacheManager.getContentCache(), storeListener); + contentManager = new ContentManagerImpl(client, accessControlManager, configuration, + getCache(configuration.getContentColumnFamily()), storeListener); + + lockManager = new LockManagerImpl(client, configuration, currentUser, + getCache(configuration.getLockColumnFamily())); + + authenticator = new AuthenticatorImpl(client, configuration, authorizableCache); - authenticator = new AuthenticatorImpl(client, configuration); - this.storeListener = storeListener; storeListener.onLogin(currentUser.getId(), this.toString()); } public void logout() throws ClientPoolException { if (closedAt == null) { + commit(); accessControlManager.close(); authorizableManager.close(); contentManager.close(); + lockManager.close(); client.close(); accessControlManager = null; authorizableManager = null; @@ -94,6 +130,11 @@ public ContentManagerImpl getContentManager() throws StorageClientException { return contentManager; } + public LockManagerImpl getLockManager() throws StorageClientException { + check(); + return lockManager; + } + public Authenticator getAuthenticator() throws StorageClientException { check(); return authenticator; @@ -114,4 +155,39 @@ private void check() 
throws StorageClientException { } } + public StorageClient getClient() { + return client; + } + + public void addCommitHandler(String key, CommitHandler commitHandler) { + synchronized (commitHandlers) { + commitHandlers.put(key, commitHandler); + } + } + + public void commit() { + synchronized (commitHandlers) { + for (CommitHandler commitHandler : commitHandlers.values()) { + commitHandler.commit(); + } + commitHandlers.clear(); + } + } + + public Map getCache(String columnFamily) { + if (storageCacheManager != null) { + if (configuration.getAuthorizableColumnFamily().equals(columnFamily)) { + return storageCacheManager.getAuthorizableCache(); + } + if (configuration.getAclColumnFamily().equals(columnFamily)) { + return storageCacheManager.getAccessControlCache(); + } + if (configuration.getContentColumnFamily().equals(columnFamily)) { + return storageCacheManager.getContentCache(); + } + return storageCacheManager.getCache(columnFamily); + } + return null; + } + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlManagerImpl.java b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlManagerImpl.java new file mode 100644 index 00000000..d4ebedd4 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlManagerImpl.java @@ -0,0 +1,731 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.accesscontrol; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang.StringUtils; +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.StoreListener; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessControlManager; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permission; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalTokenResolver; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.authorizable.AuthorizableManager; +import org.sakaiproject.nakamura.api.lite.authorizable.Group; +import 
org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.lite.CachingManagerImpl; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +public class AccessControlManagerImpl extends CachingManagerImpl implements AccessControlManager { + + private static final String _SECRET_KEY = "_secretKey"; + private static final String _PATH = "_aclPath"; + private static final String _OBJECT_TYPE = "_aclType"; + public static final String _KEY = "_aclKey"; + private static final Logger LOGGER = LoggerFactory.getLogger(AccessControlManagerImpl.class); + private static final Set PROTECTED_PROPERTIES = ImmutableSet.of(_SECRET_KEY); + private static final Set READ_ONLY_PROPERTIES = ImmutableSet.of(_SECRET_KEY, _PATH, _OBJECT_TYPE, _KEY); + private User user; + private String keySpace; + private String aclColumnFamily; + private Map cache = new ConcurrentHashMap(); + private boolean closed; + private StoreListener storeListener; + private PrincipalTokenValidator principalTokenValidator; + private PrincipalTokenResolver principalTokenResolver; + private SecureRandom secureRandom; + private AuthorizableManager authorizableManager; + private Map principalCache = new ConcurrentHashMap(); + private ThreadLocal principalRecursionLock = new ThreadLocal(); + private ThreadBoundStackReferenceCounter compilingPermissions = new ThreadBoundStackReferenceCounter(); + + public AccessControlManagerImpl(StorageClient client, User currentUser, Configuration config, + Map sharedCache, StoreListener storeListener, PrincipalValidatorResolver principalValidatorResolver) throws StorageClientException { + 
super(client, sharedCache); + this.user = currentUser; + this.aclColumnFamily = config.getAclColumnFamily(); + this.keySpace = config.getKeySpace(); + closed = false; + this.storeListener = storeListener; + principalTokenValidator = new PrincipalTokenValidator(principalValidatorResolver); + secureRandom = new SecureRandom(); + } + + public Map getAcl(String objectType, String objectPath) + throws StorageClientException, AccessDeniedException { + checkOpen(); + check(objectType, objectPath, Permissions.CAN_READ_ACL); + + String key = this.getAclKey(objectType, objectPath); + return StorageClientUtils.getFilterMap(getCached(keySpace, aclColumnFamily, key), null, null, PROTECTED_PROPERTIES, false); + } + + /** + * Property principals are stored with keys of the form + * _pp_@@ where principal is a principal. For the + * acl to be selected for the used of this session, they must have that + * principal. property is the name of the property. g or d is grant or deny. + * The value of the ACE is the bitmap for the ACE. All ACEs are selected and + * processed to form the ACE for the user, returned as a PropertyAcl. 
+ */ + public PropertyAcl getPropertyAcl(String objectType, String objectPath) throws AccessDeniedException, StorageClientException { + checkOpen(); + compilingPermissions.inc(); + try { + String key = this.getAclKey(objectType, objectPath); + Map objectAcl = getCached(keySpace, aclColumnFamily, key); + Set orderedPrincipals = Sets.newLinkedHashSet(); + { + String principal = user.getId(); + if ( principal.startsWith("_") ) { + throw new StorageClientException("Princials may not start with _ "); + } + orderedPrincipals.add(principal); + } + for (String principal : getPrincipals(user) ) { + if ( principal.startsWith("_") ) { + throw new StorageClientException("Princials may not start with _ "); + } + orderedPrincipals.add(principal); + } + // Everyone must be the last principal to be applied + if (!User.ANON_USER.equals(user.getId())) { + orderedPrincipals.add(Group.EVERYONE); + } + // go through each principal + Map grants = Maps.newHashMap(); + Map denies = Maps.newHashMap(); + for ( String principal : orderedPrincipals) { + // got through each property + String ppk = PROPERTY_PRINCIPAL_STEM+principal; + for(Entry e : objectAcl.entrySet()) { + String k = e.getKey(); + if ( k.startsWith(ppk)) { + String[] parts = StringUtils.split(k.substring(PROPERTY_PRINCIPAL_STEM.length()),"@"); + String propertyName = parts[1]; + if ( AclModification.isDeny(k)) { + int td = toInt(e.getValue()); + denies.put(propertyName, toInt(denies.get(propertyName)) | td); + } else if ( AclModification.isGrant(k)) { + int tg = toInt(e.getValue()); + grants.put(propertyName, toInt(grants.get(propertyName)) | tg); + } + } + } + } + // if the property has been granted, then that should remove the deny + for ( Entry g : grants.entrySet()) { + String k = g.getKey(); + if ( denies.containsKey(k)) { + denies.put(k, toInt(denies.get(k)) & ~g.getValue()); + } + } + return new PropertyAcl(denies); + } finally { + compilingPermissions.dec(); + } + + } + + + public Map getEffectiveAcl(String objectType, 
String objectPath) + throws StorageClientException, AccessDeniedException { + throw new UnsupportedOperationException("Nag someone to implement this"); + } + + // to sign a token we need setAcl permissions on the delegate path + /** + * Content Tokens activate ACEs for a user that holds the content token. The + * token is signed by the secret key associated with the target Object/acl + * and the token is token content item is then returned for the caller to + * save. + */ + public void signContentToken(Content token, String securityZone, String objectPath) throws StorageClientException, AccessDeniedException { + checkOpen(); + check(Security.ZONE_CONTENT, objectPath, Permissions.CAN_WRITE_ACL); + check(Security.ZONE_CONTENT, objectPath, Permissions.CAN_READ_ACL); + String key = this.getAclKey(securityZone, objectPath); + Map currentAcl = getCached(keySpace, aclColumnFamily, key); + String secretKey = (String) currentAcl.get(_SECRET_KEY); + principalTokenValidator.signToken(token, secretKey); + // the caller must save the target. 
+ } + + @SuppressWarnings(value="RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE", justification="Not correct, the line in question doesnt check for a null, so the check is not redundant") + public void setAcl(String objectType, String objectPath, AclModification[] aclModifications) + throws StorageClientException, AccessDeniedException { + checkOpen(); + check(objectType, objectPath, Permissions.CAN_WRITE_ACL); + check(objectType, objectPath, Permissions.CAN_READ_ACL); + String key = this.getAclKey(objectType, objectPath); + Map currentAcl = getCached(keySpace, aclColumnFamily, key); + if ( currentAcl == null ) { + currentAcl = Maps.newHashMap(); + } + // every ACL gets a secret key, which avoids doing it later with a special call + Map modifications = Maps.newLinkedHashMap(); + if ( !currentAcl.containsKey(_SECRET_KEY)) { + byte[] secretKeySeed = new byte[20]; + secureRandom.nextBytes(secretKeySeed); + MessageDigest md; + try { + md = MessageDigest.getInstance("SHA1"); + modifications.put(_SECRET_KEY, Base64.encodeBase64URLSafeString(md.digest(secretKeySeed))); + } catch (NoSuchAlgorithmException e) { + LOGGER.error(e.getMessage(),e); + } + } + if ( !currentAcl.containsKey(_KEY)) { + modifications.put(_KEY, key); + modifications.put(_OBJECT_TYPE, objectType); // this is here to make data migration possible in the future + modifications.put(_PATH, objectPath); // same + } + for (AclModification m : aclModifications) { + String name = m.getAceKey(); + if ( READ_ONLY_PROPERTIES.contains(name)) { + continue; + } + if (m.isRemove()) { + modifications.put(name, null); + } else { + + int originalbitmap = getBitMap(name, modifications, currentAcl); + int modifiedbitmap = m.modify(originalbitmap); + LOGGER.debug("Adding Modification {} {} ",name, modifiedbitmap); + modifications.put(name, modifiedbitmap); + + // KERN-1515 + // We need to modify the opposite key to apply the + // reverse of the change we just made. 
Otherwise, + // you can end up with ACLs with contradictions, like: + // anonymous@g=1, anonymous@d=1 + if (containsKey(inverseKeyOf(name), modifications, currentAcl)) { + // XOR gives us a mask of only the bits that changed + int difference = originalbitmap ^ modifiedbitmap; + int otherbitmap = toInt(getBitMap(inverseKeyOf(name), modifications, currentAcl)); + + // Zero out the bits that have been modified + // + // KERN-1887: This was originally toggling the modified bits + // using: "otherbitmap ^ difference", but this would + // incorrectly grant permissions in some cases (see JIRA + // issue). To avoid inconsistencies between grant and deny + // lists, setting a bit in one list should unset the + // corresponding bit in the other. + int modifiedotherbitmap = otherbitmap & ~difference; + + if (otherbitmap != modifiedotherbitmap) { + // We made a change. Record our modification. + modifications.put(inverseKeyOf(name), modifiedotherbitmap); + } + } + } + } + LOGGER.debug("Updating ACL {} {} ", key, modifications); + putCached(keySpace, aclColumnFamily, key, modifications, (currentAcl == null || currentAcl.size() == 0)); + storeListener.onUpdate(objectType, objectPath, getCurrentUserId(), "type:acl", false, null, "op:acl"); + // clear the compiled cache for this session. 
+ List keys = Lists.newArrayList(); + for ( Entry e : cache.entrySet()) { + if (e.getKey().startsWith(key)) { + keys.add(e.getKey()); + } + } + for ( String k : keys ) { + cache.remove(k); + } + } + + private boolean containsKey(String name, Map map1, + Map map2) { + return map1.containsKey(name) || map2.containsKey(name); + } + + private int getBitMap(String name, Map modifications, + Map currentAcl) { + int bm = 0; + if ( modifications.containsKey(name)) { + bm = toInt(modifications.get(name)); + } else { + bm = toInt(currentAcl.get(name)); + } + return bm; + } + + private String inverseKeyOf(String key) { + if (key == null) { + return null; + } + if (AclModification.isGrant(key)) { + return AclModification.getPrincipal(key) + AclModification.DENIED_MARKER; + } else if (AclModification.isDeny(key)) { + return AclModification.getPrincipal(key) + AclModification.GRANTED_MARKER; + } else { + return key; + } + } + + public void check(String objectType, String objectPath, Permission permission) + throws AccessDeniedException, StorageClientException { + if (user.isAdmin()) { + return; + } + if ( compilingPermissions.isSet() ) { + return; + } + // users can always operate on their own user object. 
+ if (Security.ZONE_AUTHORIZABLES.equals(objectType) && user.getId().equals(objectPath)) { + return; + } + int[] privileges = compilePermission(user, objectType, objectPath, 0); + if (!((permission.getPermission() & privileges[0]) == permission.getPermission())) { + throw new AccessDeniedException(objectType, objectPath, permission.getName(), + user.getId()); + } + } + + + private String getAclKey(String objectType, String objectPath) { + return objectType + ";" + objectPath; + } + + public void setRequestPrincipalResolver(PrincipalTokenResolver principalTokenResolver ) { + this.principalTokenResolver = principalTokenResolver; + } + public void clearRequestPrincipalResolver() { + principalTokenResolver = null; + } + + private int[] compilePermission(Authorizable authorizable, String objectType, + String objectPath, int recursion) throws StorageClientException { + String key = getAclKey(objectType, objectPath); + if (user.getId().equals(authorizable.getId()) && cache.containsKey(key)) { + return cache.get(key); + } else { + LOGGER.debug("Cache Miss {} [{}] ", cache, key); + } + try { + // we need to allow the permissions compile to bypass access control as it needs to see everything. 
+ compilingPermissions.inc(); + Map acl = getCached(keySpace, aclColumnFamily, key); + LOGGER.debug("ACL on {} is {} ", key, acl); + + int grants = 0; + int denies = 0; + if (acl != null) { + + { + String principal = authorizable.getId(); + if ( principal.startsWith("_") ) { + throw new StorageClientException("Princials may not start with _ "); + } + int tg = toInt(acl.get(principal + + AclModification.GRANTED_MARKER)); + int td = toInt(acl + .get(principal + AclModification.DENIED_MARKER)); + grants = grants | tg; + denies = denies | td; + LOGGER.debug("Added Permissions for {} g{} d{} => g{} d{}",new + Object[]{principal,tg,td,grants,denies}); + + } + /* + * Deal with any proxy principals, these override groups + */ + if (principalTokenResolver != null) { + Set inspected = Sets.newHashSet(); + if ( acl.containsKey(_SECRET_KEY)) { + String secretKey = (String) acl.get(_SECRET_KEY); + if ( secretKey != null ) { + for (Entry ace : acl.entrySet()) { + String k = ace.getKey(); + LOGGER.debug("Checking {} ",k); + if (k.startsWith(DYNAMIC_PRINCIPAL_STEM)) { + String proxyPrincipal = AclModification.getPrincipal(k).substring(DYNAMIC_PRINCIPAL_STEM.length()); + if ( !inspected.contains(proxyPrincipal)) { + inspected.add(proxyPrincipal); + LOGGER.debug("Is Dynamic {}, checking ",k); + try { + // principalTokenValidators are not safe code, hence we must re-enable full access control. 
+ compilingPermissions.suspend(); + List proxyPrincipalTokens = principalTokenResolver.resolveTokens(proxyPrincipal); + for ( Content proxyPrincipalToken : proxyPrincipalTokens ) { + if ( principalTokenValidator.validatePrincipal(proxyPrincipalToken, secretKey)) { + String pname = DYNAMIC_PRINCIPAL_STEM+proxyPrincipal; + LOGGER.debug("Has this principal {} ", proxyPrincipal); + int tg = toInt(acl.get(pname + + AclModification.GRANTED_MARKER)); + int td = toInt(acl.get(pname + + AclModification.DENIED_MARKER)); + grants = grants | tg; + denies = denies | td; + LOGGER.debug("Added Permissions for {} g{} d{} => g{} d{}",new + Object[]{pname, tg,td,grants,denies}); + break; + } + } + } finally { + // when done, we must resume compiling permissions where we were. + // NB, the code is re-entrant. + compilingPermissions.resume(); + } + } + } + } + } else { + LOGGER.debug("Secret Key is null"); + } + } else { + LOGGER.debug("No Secret Key Key "); + } + } else { + LOGGER.debug("No principalToken Resolver"); + } + // then deal with static principals + for (String principal : getPrincipals(authorizable) ) { + if ( principal.startsWith("_") ) { + throw new StorageClientException("Princials may not start with _ "); + } + int tg = toInt(acl.get(principal + + AclModification.GRANTED_MARKER)); + int td = toInt(acl + .get(principal + AclModification.DENIED_MARKER)); + grants = grants | tg; + denies = denies | td; + LOGGER.debug("Added Permissions for {} g{} d{} => g{} d{}",new + Object[]{principal,tg,td,grants,denies}); + } + + // Everyone must be the last principal to be applied + if (!User.ANON_USER.equals(authorizable.getId())) { + // all users except anon are in the group everyone, by default + // but only if not already denied or granted by a more specific + // permission. 
+ int tg = (toInt(acl.get(Group.EVERYONE + + AclModification.GRANTED_MARKER)) & ~denies); + int td = (toInt(acl.get(Group.EVERYONE + + AclModification.DENIED_MARKER)) & ~grants); + grants = grants | tg; + denies = denies | td; + LOGGER.debug("Added Permissions for {} g{} d{} => g{} d{}",new + Object[]{Group.EVERYONE,tg,td,grants,denies}); + + } + /* + * grants contains the granted permissions in a bitmap denies + * contains the denied permissions in a bitmap + */ + int granted = grants; + int denied = denies; + + /* + * Only look to parent objects if this is not the root object and + * everything is not granted and denied + */ + if (recursion < 20 && !StorageClientUtils.isRoot(objectPath) + && (granted != 0xffff || denied != 0xffff)) { + recursion++; + int[] parentPriv = compilePermission(authorizable, objectType, + StorageClientUtils.getParentObjectPath(objectPath), recursion); + if (parentPriv != null) { + /* + * Grant permission not denied at this level parentPriv[0] + * is permissions granted by the parent ~denies is + * permissions not denied here parentPriv[0] & ~denies is + * permissions granted by the parent that have not been + * denied here. we need to add those to things granted here. 
+ * ie | + */ + granted = grants | (parentPriv[0] & ~denies); + /* + * Deny permissions not granted at this level + */ + denied = denies | (parentPriv[1] & ~grants); + } + } + // If not denied all users and groups can read other users and + // groups and all content can be read + if (((denied & Permissions.CAN_READ.getPermission()) == 0) + && (Security.ZONE_AUTHORIZABLES.equals(objectType) || Security.ZONE_CONTENT + .equals(objectType))) { + granted = granted | Permissions.CAN_READ.getPermission(); + LOGGER.debug("Default Read Permission set {} {} ",key,denied); + } else { + LOGGER.debug("Default Read has been denied {} {} ",key, + denied); + } + LOGGER.debug("Permissions on {} for {} is {} {} ",new + Object[]{key,user.getId(),granted,denied}); + /* + * Keep a cached copy + */ + if (user.getId().equals(authorizable.getId())) { + cache.put(key, new int[] { granted, denied }); + } + return new int[] { granted, denied }; + + } + if (Security.ZONE_AUTHORIZABLES.equals(objectType) + || Security.ZONE_CONTENT.equals(objectType)) { + // unless explicitly denied all users can read other users. + return new int[] { Permissions.CAN_READ.getPermission(), 0 }; + } + return new int[] { 0, 0 }; + } finally { + // decrement the counter from here. 
+ compilingPermissions.dec(); + } + } + + + private String[] getPrincipals(final Authorizable authorizable) { + String k = authorizable.getId(); + if (principalCache.containsKey(k)) { + return principalCache.get(k); + } + Set memberOfSet = Sets.newHashSet(authorizable.getPrincipals()); + if ( authorizableManager != null ) { + // membership resolution is possible, but we had better turn off recursion + if ( principalRecursionLock.get() == null ) { + principalRecursionLock.set("l"); + try { + for ( Iterator gi = authorizable.memberOf(authorizableManager); gi.hasNext(); ) { + memberOfSet.add(gi.next().getId()); + } + } finally { + principalRecursionLock.set(null); + } + } + } + memberOfSet.remove(Group.EVERYONE); + String[] m = memberOfSet.toArray(new String[memberOfSet.size()]); + principalCache.put(k, m); + return m; + } + + + private int toInt(Object object) { + if ( object instanceof Integer ) { + return ((Integer) object).intValue(); + } + LOGGER.debug("Bitmap Not Present"); + return 0; + } + + public String getCurrentUserId() { + return user.getId(); + } + + public void close() { + closed = true; + } + + private void checkOpen() throws StorageClientException { + if (closed) { + throw new StorageClientException("Access Control Manager is closed"); + } + } + + public boolean can(Authorizable authorizable, String objectType, String objectPath, + Permission permission) { + if ( compilingPermissions.isSet() ) { + return true; + } + if (authorizable instanceof User && ((User) authorizable).isAdmin()) { + return true; + } + // users can always operate on their own user object. 
+ if (Security.ZONE_AUTHORIZABLES.equals(objectType) + && authorizable.getId().equals(objectPath)) { + return true; + } + try { + int[] privileges = compilePermission(authorizable, objectType, objectPath, 0); + if (!((permission.getPermission() & privileges[0]) == permission.getPermission())) { + return false; + } + } catch (StorageClientException e) { + LOGGER.warn(e.getMessage(), e); + return false; + } + return true; + } + + public Permission[] getPermissions(String objectType, String path) throws StorageClientException { + int[] perms = compilePermission(this.user, objectType, path, 0); + List permissions = Lists.newArrayList(); + for (Permission p : Permissions.PRIMARY_PERMISSIONS) { + if ((perms[0] & p.getPermission()) == p.getPermission()) { + permissions.add(p); + } + } + return permissions.toArray(new Permission[permissions.size()]); + } + + public String[] findPrincipals(String objectType, String objectPath, int permission, boolean granted) throws StorageClientException { + Map principalMap = internalCompilePrincipals(objectType, objectPath, 0); + LOGGER.debug("Got Principals {} ",principalMap); + List principals = Lists.newArrayList(); + for (Entry perm : principalMap.entrySet()) { + int[] p = perm.getValue(); + if ( granted && (p[0] & permission) == permission ) { + principals.add(perm.getKey()); + LOGGER.debug("Included {} {} {} ",new Object[]{perm.getKey(), perm.getValue(), permission}); + } else if ( !granted && (p[1] & permission) == permission) { + principals.add(perm.getKey()); + LOGGER.debug("Included {} {} {} ",new Object[]{perm.getKey(), perm.getValue(), permission}); + } else { + LOGGER.debug("Filtered {} {} {} ",new Object[]{perm.getKey(), perm.getValue(), permission}); + } + } + LOGGER.debug(" Found Principals {} ",principals); + return principals.toArray(new String[principals.size()]); + } + + + + private Map internalCompilePrincipals(String objectType, String objectPath, int recursion) throws StorageClientException { + Map 
compiledPermissions = Maps.newHashMap(); + String key = getAclKey(objectType, objectPath); + + Map acl = getCached(keySpace, aclColumnFamily, key); + + if (acl != null) { + LOGGER.debug("Checking {} {} ",key,acl); + for (Entry ace : acl.entrySet()) { + String aceKey = ace.getKey(); + String principal = aceKey.substring(0, aceKey.length() - 2); + + if (!compiledPermissions.containsKey(principal)) { + int tg = toInt(acl.get(principal + + AclModification.GRANTED_MARKER)); + int td = toInt(acl.get(principal + + AclModification.DENIED_MARKER)); + compiledPermissions.put(principal, new int[] { tg, td }); + LOGGER.debug("added {} ",principal); + } + + } + } + /* + * grants contains the granted permissions in a bitmap denies contains + * the denied permissions in a bitmap + */ + + /* + * Only look to parent objects if this is not the root object and + * everything is not granted and denied + */ + if (recursion < 20 && !StorageClientUtils.isRoot(objectPath)) { + recursion++; + Map parentPermissions = internalCompilePrincipals(objectType, + StorageClientUtils.getParentObjectPath(objectPath), recursion); + // add the parernt privileges in + for (Entry parentPermission : parentPermissions.entrySet()) { + int[] thisPriv = new int[2]; + String principal = parentPermission.getKey(); + if (compiledPermissions.containsKey(principal)) { + thisPriv = compiledPermissions.get(principal); + LOGGER.debug("modified {} ",principal); + } else { + LOGGER.debug("creating {} ",principal); + } + int[] parentPriv = parentPermission.getValue(); + + /* + * Grant permission not denied at this level parentPriv[0] is + * permissions granted by the parent ~denies is permissions not + * denied here parentPriv[0] & ~denies is permissions granted by + * the parent that have not been denied here. we need to add + * those to things granted here. 
ie | + */ + int granted = thisPriv[0] | (parentPriv[0] & ~thisPriv[1]); + /* + * Deny permissions not granted at this level + */ + int denied = thisPriv[1] | (parentPriv[1] & ~thisPriv[0]); + + compiledPermissions.put(principal, new int[] { granted, denied }); + + } + } + + // + // If not denied all users and groups can read other users and + // groups and all content can be read + for (String principal : new String[] { Group.EVERYONE, User.ANON_USER }) { + int[] perm = new int[2]; + if (compiledPermissions.containsKey(principal)) { + perm = compiledPermissions.get(principal); + } + if (((perm[1] & Permissions.CAN_READ.getPermission()) == 0) + && (Security.ZONE_AUTHORIZABLES.equals(objectType) || Security.ZONE_CONTENT + .equals(objectType))) { + perm[0] = perm[0] | Permissions.CAN_READ.getPermission(); + LOGGER.debug("added Default {} ",principal); + compiledPermissions.put(principal, perm); + } + } + compiledPermissions.put(User.ADMIN_USER, new int[] { 0xffff, 0x0000}); + return compiledPermissions; + // only store those permissions the match the requested set.] + + + } + + @Override + protected Logger getLogger() { + return LOGGER; + } + + public void setAuthorizableManager(AuthorizableManager authorizableManager) { + this.authorizableManager = authorizableManager; + } + + + + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlledMap.java b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlledMap.java new file mode 100644 index 00000000..5681d6e3 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlledMap.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.accesscontrol; + +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + + +public class AccessControlledMap extends HashMap { + + + private PropertyAcl propertyAcl; + + public AccessControlledMap(PropertyAcl propertyAcl) { + this.propertyAcl = propertyAcl; + } + /** + * + */ + private static final long serialVersionUID = -6550830558631198709L; + + @Override + public V put(K key, V value) { + if ( propertyAcl.canWrite(key)) { + return super.put(key, value); + } + return null; + } + + @Override + public void putAll(Map m) { + for ( Entry e : m.entrySet()) { + put(e.getKey(), e.getValue()); + } + } + + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AuthenticatorImpl.java b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AuthenticatorImpl.java similarity index 61% rename from src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AuthenticatorImpl.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AuthenticatorImpl.java index 96260f71..5d80c524 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AuthenticatorImpl.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AuthenticatorImpl.java @@ -17,35 +17,37 @@ */ package org.sakaiproject.nakamura.lite.accesscontrol; +import java.util.Map; + +import 
org.sakaiproject.nakamura.api.lite.CacheHolder; import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; import org.sakaiproject.nakamura.api.lite.accesscontrol.Authenticator; import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.util.EnabledPeriod; +import org.sakaiproject.nakamura.lite.CachingManagerImpl; import org.sakaiproject.nakamura.lite.authorizable.UserInternal; -import org.sakaiproject.nakamura.lite.storage.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Map; - -public class AuthenticatorImpl implements Authenticator { +public class AuthenticatorImpl extends CachingManagerImpl implements Authenticator { private static final Logger LOGGER = LoggerFactory.getLogger(AuthenticatorImpl.class); - private StorageClient client; private String keySpace; private String authorizableColumnFamily; - public AuthenticatorImpl(StorageClient client, Configuration configuration) { - this.client = client; + public AuthenticatorImpl(StorageClient client, Configuration configuration, Map sharedCache) { + super(client, sharedCache); this.keySpace = configuration.getKeySpace(); this.authorizableColumnFamily = configuration.getAuthorizableColumnFamily(); } public User authenticate(String userid, String password) { try { - Map userAuthMap = client - .get(keySpace, authorizableColumnFamily, userid); + Map userAuthMap = getCached(keySpace, authorizableColumnFamily, userid); if (userAuthMap == null) { LOGGER.debug("User was not found {}", userid); return null; @@ -55,29 +57,48 @@ public User authenticate(String userid, String password) { String storedPassword = (String) userAuthMap .get(User.PASSWORD_FIELD); if 
(passwordHash.equals(storedPassword)) { - return new UserInternal(userAuthMap, false); + if ( EnabledPeriod.isInEnabledPeriod((String) userAuthMap.get(User.LOGIN_ENABLED_PERIOD_FIELD)) ) { + return new UserInternal(userAuthMap, null, false); + } } LOGGER.debug("Failed to authentication, passwords did not match"); } catch (StorageClientException e) { LOGGER.debug("Failed To authenticate " + e.getMessage(), e); + } catch (AccessDeniedException e) { + LOGGER.debug("Failed To system authenticate user " + e.getMessage(), e); } return null; } - public User systemAuthenticate(String userid) { + return internalSystemAuthenticate(userid, false); + } + public User systemAuthenticateBypassEnable(String userid) { + return internalSystemAuthenticate(userid, true); + } + + private User internalSystemAuthenticate(String userid, boolean forceEnableLogin) { try { - Map userAuthMap = client - .get(keySpace, authorizableColumnFamily, userid); + Map userAuthMap = getCached(keySpace, authorizableColumnFamily, userid); if (userAuthMap == null || userAuthMap.size() == 0) { LOGGER.debug("User was not found {}", userid); return null; } - return new UserInternal(userAuthMap, false); + if ( forceEnableLogin || EnabledPeriod.isInEnabledPeriod((String) userAuthMap.get(User.LOGIN_ENABLED_PERIOD_FIELD)) ) { + return new UserInternal(userAuthMap, null, false); + } } catch (StorageClientException e) { LOGGER.debug("Failed To system authenticate user " + e.getMessage(), e); + } catch (AccessDeniedException e) { + LOGGER.debug("Failed To system authenticate user " + e.getMessage(), e); } return null; } + @Override + protected Logger getLogger() { + return LOGGER; + } + + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/DefaultPrincipalValidator.java b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/DefaultPrincipalValidator.java new file mode 100644 index 00000000..8c7a5e44 --- /dev/null +++ 
b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/DefaultPrincipalValidator.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.accesscontrol; + +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorPlugin; +import org.sakaiproject.nakamura.api.lite.content.Content; + +public class DefaultPrincipalValidator implements PrincipalValidatorPlugin { + + public boolean validate(Content proxyPrincipalToken) { + // TODO add some standard validation steps like date + return true; + } + + public String[] getProtectedFields() { + return new String[0]; + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PrincipalTokenValidator.java b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PrincipalTokenValidator.java new file mode 100644 index 00000000..7faa6cae --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PrincipalTokenValidator.java @@ -0,0 +1,144 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.accesscontrol; + +import org.apache.commons.codec.binary.Base64; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorPlugin; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.UnsupportedEncodingException; +import java.security.InvalidKeyException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +import javax.crypto.Mac; +import javax.crypto.spec.SecretKeySpec; + +public class PrincipalTokenValidator { + + + public static final String VALIDATORPLUGIN = "validatorplugin"; + public static final String _ACLTOKEN = "_acltoken"; + private static final String HMAC_SHA512 = "HmacSHA512"; + private static final Logger LOGGER = LoggerFactory.getLogger(PrincipalTokenValidator.class); + private PrincipalValidatorPlugin defaultPrincipalValidator = new DefaultPrincipalValidator(); + private PrincipalValidatorResolver principalValidatorResolver; + + public PrincipalTokenValidator(PrincipalValidatorResolver principalValidatorResolver) { + this.principalValidatorResolver = principalValidatorResolver; + } + + public boolean validatePrincipal(Content 
proxyPrincipalToken, String sharedKey) { + if ( proxyPrincipalToken == null) { + LOGGER.debug("Failed to Validate Token at no content item "); + return false; + } + if ( !proxyPrincipalToken.hasProperty(_ACLTOKEN)) { + LOGGER.debug("Failed to Validate Token at {} no ACL Token ", proxyPrincipalToken.getPath()); + return false; + } + PrincipalValidatorPlugin plugin = null; + if ( proxyPrincipalToken.hasProperty(VALIDATORPLUGIN) ) { + plugin = principalValidatorResolver.getPluginByName((String) proxyPrincipalToken.getProperty(VALIDATORPLUGIN)); + } else { + plugin = defaultPrincipalValidator; + } + if ( plugin == null ) { + LOGGER.debug("Failed to Validate Token at {} no plugin "); + return false; + } + String hmac = signToken(proxyPrincipalToken, sharedKey, plugin); + if ( hmac == null || !hmac.equals(proxyPrincipalToken.getProperty(_ACLTOKEN)) ) { + LOGGER.debug("Failed to Validate Token at {} as {}, does not match ",proxyPrincipalToken.getPath(), hmac); + return false; + } + boolean validate = plugin.validate(proxyPrincipalToken); + if ( validate ) { + LOGGER.debug("Validated Token at {} as {} using plugin {} ",new Object[] { proxyPrincipalToken.getPath(), hmac, plugin}); + } else { + LOGGER.debug("Invalid Token at {} as {} using plugin {} ",new Object[] { proxyPrincipalToken.getPath(), hmac, plugin}); + } + return validate; + } + + public void signToken(Content token, String sharedKey ) throws StorageClientException { + PrincipalValidatorPlugin plugin = null; + if ( token.hasProperty(VALIDATORPLUGIN) ) { + plugin = principalValidatorResolver.getPluginByName((String) token.getProperty(VALIDATORPLUGIN)); + } else { + plugin = defaultPrincipalValidator; + } + if ( plugin == null ) { + throw new StorageClientException("The property validatorplugin does not specify an active PricipalValidatorPlugin, cant sign"); + } + token.setProperty(_ACLTOKEN, signToken(token, sharedKey, plugin)); + } + + private String signToken(Content token, String sharedKey, 
PrincipalValidatorPlugin plugin) { + try { + MessageDigest md = MessageDigest.getInstance("SHA-512"); + byte[] input = sharedKey.getBytes("UTF-8"); + byte[] data = md.digest(input); + SecretKeySpec key = new SecretKeySpec(data, HMAC_SHA512); + return getHmac(token, plugin.getProtectedFields(), key); + } catch (InvalidKeyException e) { + LOGGER.warn(e.getMessage()); + LOGGER.debug(e.getMessage(),e); + return null; + } catch (NoSuchAlgorithmException e) { + LOGGER.warn(e.getMessage()); + LOGGER.debug(e.getMessage(),e); + return null; + } catch (IllegalStateException e) { + LOGGER.warn(e.getMessage()); + LOGGER.debug(e.getMessage(),e); + return null; + } catch (UnsupportedEncodingException e) { + LOGGER.warn(e.getMessage()); + LOGGER.debug(e.getMessage(),e); + return null; + } + } + + + private String getHmac(Content principalToken, String[] extraFields, SecretKeySpec key) throws NoSuchAlgorithmException, InvalidKeyException, IllegalStateException, UnsupportedEncodingException { + StringBuilder sb = new StringBuilder(); + sb.append(principalToken.getPath()).append("@"); + if ( principalToken.hasProperty(VALIDATORPLUGIN)) { + sb.append(principalToken.getProperty(VALIDATORPLUGIN)).append("@"); + } + for (String f : extraFields) { + if ( principalToken.hasProperty(f)) { + sb.append(principalToken.getProperty(f)).append("@"); + } else { + sb.append("null").append("@"); + } + } + Mac m = Mac.getInstance(HMAC_SHA512); + m.init(key); + String message = sb.toString(); + LOGGER.debug("Signing {} ", message); + m.update(message.getBytes("UTF-8")); + return Base64.encodeBase64URLSafeString(m.doFinal()); + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PrincipalValidatorResolverImpl.java b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PrincipalValidatorResolverImpl.java new file mode 100644 index 00000000..b4ad2444 --- /dev/null +++ 
b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PrincipalValidatorResolverImpl.java @@ -0,0 +1,46 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.accesscontrol; + +import com.google.common.collect.Maps; + +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorPlugin; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; + +import java.util.Map; + +@Component(immediate=true, metatype=true, enabled=true) +@Service(value=PrincipalValidatorResolver.class) +public class PrincipalValidatorResolverImpl implements PrincipalValidatorResolver { + + protected Map pluginStore = Maps.newConcurrentMap(); + + public PrincipalValidatorPlugin getPluginByName(String key) { + return pluginStore.get(key); + } + + public void registerPlugin(String key, PrincipalValidatorPlugin plugin) { + pluginStore.put(key, plugin); + } + public void unregisterPlugin(String key) { + pluginStore.remove(key); + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PropertyAcl.java 
b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PropertyAcl.java new file mode 100644 index 00000000..deab4bb4 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/PropertyAcl.java @@ -0,0 +1,69 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.accesscontrol; + +import java.io.Serializable; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; + +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; + +public class PropertyAcl implements Serializable { + + /** + * + */ + private static final long serialVersionUID = -3998584870894631478L; + private Set readDenied; + private Set writeDenied; + + public PropertyAcl(Map denies) { + Set r = Sets.newHashSet(); + Set w = Sets.newHashSet(); + for (Entry ace : denies.entrySet()) { + if ((Permissions.CAN_READ_PROPERTY.getPermission() & ace.getValue()) == Permissions.CAN_READ_PROPERTY + .getPermission()) { + r.add(ace.getKey()); + } + if ((Permissions.CAN_WRITE_PROPERTY.getPermission() & ace.getValue()) == Permissions.CAN_WRITE_PROPERTY + .getPermission()) { + w.add(ace.getKey()); + } + } + readDenied = ImmutableSet.copyOf(r.toArray(new String[r.size()])); + writeDenied = ImmutableSet.copyOf(w.toArray(new String[w.size()])); + } + + public PropertyAcl() { + readDenied = ImmutableSet.of(); + writeDenied = ImmutableSet.of(); + } + + public Set readDeniedSet() { + return readDenied; + } + + public boolean canWrite(Object key) { + return !writeDenied.contains(key); + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/ThreadBoundStackReferenceCounter.java b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/ThreadBoundStackReferenceCounter.java new file mode 100644 index 00000000..428a2701 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/ThreadBoundStackReferenceCounter.java @@ -0,0 +1,99 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
/**
 * Maintains a thread bound reference counter that can be suspended and
 * resumed. When suspended the current counter is pushed to a stack and a new
 * counter is started at zero; when resumed the counter is replaced with the
 * value popped from the stack. All operations are bound to the current
 * thread. inc()/dec() and suspend()/resume() should be used in matching pairs
 * protected by try { ... } finally { ... } constructs; the class makes no
 * attempt to guess what the calling code is doing.
 *
 * Improvements over the previous version: drops the Guava dependency (the
 * java.util collections suffice here) and extracts the lazy per-thread stack
 * initialization, which was duplicated in push() and pop().
 *
 * @author ieb
 */
public class ThreadBoundStackReferenceCounter {

    // No initialValue override: a missing value is treated as 0 in get().
    private ThreadLocal<Integer> counter = new ThreadLocal<Integer>();
    // Per-thread stack of suspended counter values, created on first use.
    private ThreadLocal<List<Integer>> suspended = new ThreadLocal<List<Integer>>();

    /** Increments this thread's counter. */
    public void inc() {
        set(get() + 1);
    }

    /** Decrements this thread's counter; the counter never goes below 0. */
    public void dec() {
        set(get() - 1);
    }

    /** Pushes the current count onto the stack and restarts counting at 0. */
    public void suspend() {
        push(get());
        set(0);
    }

    /** Restores the count that was active before the matching suspend(). */
    public void resume() {
        set(pop());
    }

    /** @return true when this thread's counter is above zero. */
    public boolean isSet() {
        return get() > 0;
    }

    // Current counter value for this thread; an unset ThreadLocal reads as 0.
    private int get() {
        Integer c = counter.get();
        if (c == null) {
            return 0;
        }
        return c.intValue();
    }

    // Stores the value, clamping negatives to 0 so dec() cannot underflow.
    private void set(int i) {
        if (i < 0) {
            i = 0;
        }
        counter.set(i);
    }

    // Lazily creates and returns this thread's suspension stack.
    private List<Integer> stack() {
        List<Integer> s = suspended.get();
        if (s == null) {
            s = new java.util.ArrayList<Integer>();
            suspended.set(s);
        }
        return s;
    }

    private void push(int i) {
        stack().add(i);
    }

    // Pops the most recently suspended value; an empty stack yields 0 so an
    // unmatched resume() simply leaves the counter unset.
    private int pop() {
        List<Integer> s = stack();
        if (s.isEmpty()) {
            return 0;
        }
        return s.remove(s.size() - 1);
    }

}
StorageClientException { User.SYSTEM_USER); if (authorizableMap == null || authorizableMap.size() == 0) { Map user = ImmutableMap.of(Authorizable.ID_FIELD, - User.SYSTEM_USER, Authorizable.NAME_FIELD, + (Object)User.SYSTEM_USER, Authorizable.NAME_FIELD, User.SYSTEM_USER, Authorizable.PASSWORD_FIELD, "--no-password--", Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.USER_VALUE); @@ -94,7 +94,7 @@ private void createAdminUser() throws StorageClientException { User.ADMIN_USER); if (authorizableMap == null || authorizableMap.size() == 0) { Map user = ImmutableMap.of(Authorizable.ID_FIELD, - User.ADMIN_USER, Authorizable.NAME_FIELD, + (Object)User.ADMIN_USER, Authorizable.NAME_FIELD, User.ADMIN_USER, Authorizable.PASSWORD_FIELD, StorageClientUtils.secureHash("admin"), Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.USER_VALUE); @@ -110,7 +110,7 @@ private void createAnonUser() throws StorageClientException { User.ANON_USER); if (authorizableMap == null || authorizableMap.size() == 0) { Map user = ImmutableMap.of(Authorizable.ID_FIELD, - User.ANON_USER, Authorizable.NAME_FIELD, + (Object)User.ANON_USER, Authorizable.NAME_FIELD, User.ANON_USER, Authorizable.PASSWORD_FIELD, Authorizable.NO_PASSWORD, Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.USER_VALUE); diff --git a/src/main/java/org/sakaiproject/nakamura/lite/authorizable/AuthorizableManagerImpl.java b/core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/AuthorizableManagerImpl.java similarity index 63% rename from src/main/java/org/sakaiproject/nakamura/lite/authorizable/AuthorizableManagerImpl.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/AuthorizableManagerImpl.java index 4fedb9b7..7ffc0e42 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/authorizable/AuthorizableManagerImpl.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/AuthorizableManagerImpl.java @@ -17,13 +17,14 @@ */ package org.sakaiproject.nakamura.lite.authorizable; 
-import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMap.Builder; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.commons.lang.StringUtils; import org.sakaiproject.nakamura.api.lite.CacheHolder; import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.Session; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.StoreListener; @@ -37,16 +38,20 @@ import org.sakaiproject.nakamura.api.lite.authorizable.Group; import org.sakaiproject.nakamura.api.lite.authorizable.User; import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; -import org.sakaiproject.nakamura.lite.CachingManager; +import org.sakaiproject.nakamura.lite.CachingManagerImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; -import org.sakaiproject.nakamura.lite.storage.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.SparseRow; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Arrays; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; /** * An Authourizable Manager bound to a user, on creation the user ID specified @@ -55,11 +60,16 @@ * @author ieb * */ -public class AuthorizableManagerImpl extends CachingManager implements AuthorizableManager { 
+public class AuthorizableManagerImpl extends CachingManagerImpl implements AuthorizableManager { + private static final String DISABLED_PASSWORD_HASH = "--disabled--"; private static final Set FILTER_ON_UPDATE = ImmutableSet.of(Authorizable.ID_FIELD, - Authorizable.PASSWORD_FIELD); + Authorizable.PASSWORD_FIELD, Authorizable.LOGIN_ENABLED_PERIOD_FIELD); private static final Set FILTER_ON_CREATE = ImmutableSet.of(Authorizable.ID_FIELD, + Authorizable.PASSWORD_FIELD, Authorizable.LOGIN_ENABLED_PERIOD_FIELD); + private static final Set ADMIN_FILTER_ON_UPDATE = ImmutableSet.of(Authorizable.ID_FIELD, + Authorizable.PASSWORD_FIELD); + private static final Set ADMIN_FILTER_ON_CREATE = ImmutableSet.of(Authorizable.ID_FIELD, Authorizable.PASSWORD_FIELD); private static final Logger LOGGER = LoggerFactory.getLogger(AuthorizableManagerImpl.class); private String currentUserId; @@ -71,9 +81,12 @@ public class AuthorizableManagerImpl extends CachingManager implements Authoriza private boolean closed; private Authenticator authenticator; private StoreListener storeListener; + private Session session; + private Set filterOnUpdate; + private Set filterOnCreate; - public AuthorizableManagerImpl(User currentUser, StorageClient client, - Configuration configuration, AccessControlManager accessControlManager, + public AuthorizableManagerImpl(User currentUser, Session session, StorageClient client, + Configuration configuration, AccessControlManagerImpl accessControlManager, Map sharedCache, StoreListener storeListener) throws StorageClientException, AccessDeniedException { super(client, sharedCache); @@ -82,13 +95,22 @@ public AuthorizableManagerImpl(User currentUser, StorageClient client, throw new RuntimeException("Current User ID shoud not be null"); } this.thisUser = currentUser; + if ( thisUser.isAdmin() ) { + filterOnUpdate = ADMIN_FILTER_ON_UPDATE; + filterOnCreate = ADMIN_FILTER_ON_CREATE; + } else { + filterOnUpdate = FILTER_ON_UPDATE; + filterOnCreate = FILTER_ON_CREATE; + 
} + this.session = session; this.client = client; this.accessControlManager = accessControlManager; this.keySpace = configuration.getKeySpace(); this.authorizableColumnFamily = configuration.getAuthorizableColumnFamily(); - this.authenticator = new AuthenticatorImpl(client, configuration); + this.authenticator = new AuthenticatorImpl(client, configuration, sharedCache); this.closed = false; this.storeListener = storeListener; + accessControlManager.setAuthorizableManager(this); } public User getUser() { @@ -112,18 +134,40 @@ public Authorizable findAuthorizable(final String authorizableId) throws AccessD return null; } if (isAUser(authorizableMap)) { - return new UserInternal(authorizableMap, false); + return new UserInternal(authorizableMap, session, false); } else if (isAGroup(authorizableMap)) { - return new GroupInternal(authorizableMap, false); + return new GroupInternal(authorizableMap, session, false); } return null; } public void updateAuthorizable(Authorizable authorizable) throws AccessDeniedException, StorageClientException { + updateAuthorizable(authorizable, true); + } + + public void updateAuthorizable(Authorizable authorizable, boolean withTouch) throws AccessDeniedException, + StorageClientException { checkOpen(); + if ( !withTouch && !thisUser.isAdmin() ) { + throw new StorageClientException("Only admin users can update without touching the user"); + } String id = authorizable.getId(); + if ( authorizable.isImmutable() ) { + throw new StorageClientException("You cant update an immutable authorizable:"+id); + } + if ( authorizable.isReadOnly() ) { + return; + } + if ( authorizable.isNew() ) { + throw new StorageClientException("You must create an authorizable if its new, you cant update an new authorizable"); + } accessControlManager.check(Security.ZONE_AUTHORIZABLES, id, Permissions.CAN_WRITE); + if ( !authorizable.isModified() ) { + return; + // only perform the update and send the event if we see the authorizable as modified. 
It will be modified ig group membership was changed. + } + /* * Update the principal records for members. The list of members that * have been added and removed is converted into a list of Authorzables. @@ -138,10 +182,14 @@ public void updateAuthorizable(Authorizable authorizable) throws AccessDeniedExc * permission at some point in the future. */ String type = "type:user"; + List attributes = Lists.newArrayList(); + String[] membersAdded = null; + String[] membersRemoved = null; + if (authorizable instanceof Group) { type = "type:group"; Group group = (Group) authorizable; - String[] membersAdded = group.getMembersAdded(); + membersAdded = group.getMembersAdded(); Authorizable[] newMembers = new Authorizable[membersAdded.length]; int i = 0; for (String newMember : membersAdded) { @@ -166,7 +214,7 @@ public void updateAuthorizable(Authorizable authorizable) throws AccessDeniedExc i++; } i = 0; - String[] membersRemoved = group.getMembersRemoved(); + membersRemoved = group.getMembersRemoved(); Authorizable[] retiredMembers = new Authorizable[membersRemoved.length]; for (String retiredMember : membersRemoved) { try { @@ -181,7 +229,9 @@ public void updateAuthorizable(Authorizable authorizable) throws AccessDeniedExc } - LOGGER.debug("Membership Change added [{}] removed [{}] ", Arrays.toString(newMembers), Arrays.toString(retiredMembers)); + String membersAddedCsv = StringUtils.join(membersAdded, ','); + String membersRemovedCsv = StringUtils.join(membersRemoved, ','); + LOGGER.debug("Membership Change added [{}] removed [{}] ", membersAddedCsv, membersRemovedCsv); int changes = 0; // there is now a sparse list of authorizables, that need changing for (Authorizable newMember : newMembers) { @@ -190,7 +240,8 @@ public void updateAuthorizable(Authorizable authorizable) throws AccessDeniedExc if (newMember.isModified()) { Map encodedProperties = StorageClientUtils .getFilteredAndEcodedMap(newMember.getPropertiesForUpdate(), - FILTER_ON_UPDATE); + filterOnUpdate); + 
encodedProperties.put(Authorizable.ID_FIELD, newMember.getId()); putCached(keySpace, authorizableColumnFamily, newMember.getId(), encodedProperties, newMember.isNew()); LOGGER.debug("Updated {} with principal {} {} ",new Object[]{newMember.getId(), group.getId(), encodedProperties}); @@ -208,7 +259,8 @@ public void updateAuthorizable(Authorizable authorizable) throws AccessDeniedExc if (retiredMember.isModified()) { Map encodedProperties = StorageClientUtils .getFilteredAndEcodedMap(retiredMember.getPropertiesForUpdate(), - FILTER_ON_UPDATE); + filterOnUpdate); + encodedProperties.put(Authorizable.ID_FIELD, retiredMember.getId()); putCached(keySpace, authorizableColumnFamily, retiredMember.getId(), encodedProperties, retiredMember.isNew()); changes++; @@ -220,24 +272,56 @@ public void updateAuthorizable(Authorizable authorizable) throws AccessDeniedExc } } LOGGER.debug(" Finished Updating other principals, made {} changes, Saving Changes to {} ", changes, id); - } + // if there were added or removed members, send them out as event properties for + // external integration + if (membersAdded.length > 0) { + attributes.add("added:" + membersAddedCsv); + } + if (membersRemoved.length > 0) { + attributes.add("removed:" + membersRemovedCsv); + } + } + attributes.add(type); + boolean wasNew = authorizable.isNew(); + Map beforeUpdateProperties = authorizable.getOriginalProperties(); Map encodedProperties = StorageClientUtils.getFilteredAndEcodedMap( - authorizable.getPropertiesForUpdate(), FILTER_ON_UPDATE); - encodedProperties.put(Authorizable.LASTMODIFIED,System.currentTimeMillis()); - encodedProperties.put(Authorizable.LASTMODIFIED_BY,accessControlManager.getCurrentUserId()); + authorizable.getPropertiesForUpdate(), filterOnUpdate); + if (withTouch) { + encodedProperties.put(Authorizable.LASTMODIFIED_FIELD, System.currentTimeMillis()); + encodedProperties.put(Authorizable.LASTMODIFIED_BY_FIELD, + accessControlManager.getCurrentUserId()); + } + 
encodedProperties.put(Authorizable.ID_FIELD, id); // make certain the ID is always there. putCached(keySpace, authorizableColumnFamily, id, encodedProperties, authorizable.isNew()); authorizable.reset(getCached(keySpace, authorizableColumnFamily, id)); - storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, id, accessControlManager.getCurrentUserId(), true, type); + String[] attrs = attributes.toArray(new String[attributes.size()]); + storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, id, type, accessControlManager.getCurrentUserId(), wasNew, beforeUpdateProperties, attrs); + // for each added or removed member, send an UPDATE event so indexing can properly + // record the groups each member is a member of.\ + + // when we add members we dont emit an event with resource type in it. + if (membersAdded != null) { + for (String added : membersAdded) { + storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, added, accessControlManager.getCurrentUserId(), null, false, null); + } + } + if (membersRemoved != null) { + for (String removed : membersRemoved) { + storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, removed, accessControlManager.getCurrentUserId(), null, false, null); + } + } } + public boolean createAuthorizable(String authorizableId, String authorizableName, String password, Map properties) throws AccessDeniedException, StorageClientException { + checkId(authorizableId); if (properties == null) { properties = Maps.newHashMap(); } @@ -258,7 +342,7 @@ public boolean createAuthorizable(String authorizableId, String authorizableName return false; } Map encodedProperties = StorageClientUtils.getFilteredAndEcodedMap( - properties, FILTER_ON_CREATE); + properties, filterOnCreate); encodedProperties.put(Authorizable.ID_FIELD, authorizableId); encodedProperties .put(Authorizable.NAME_FIELD, authorizableName); @@ -269,14 +353,29 @@ public boolean createAuthorizable(String authorizableId, String authorizableName encodedProperties.put(Authorizable.PASSWORD_FIELD, 
Authorizable.NO_PASSWORD); } - encodedProperties.put(Authorizable.CREATED, + encodedProperties.put(Authorizable.CREATED_FIELD, System.currentTimeMillis()); - encodedProperties.put(Authorizable.CREATED_BY, + encodedProperties.put(Authorizable.CREATED_BY_FIELD, accessControlManager.getCurrentUserId()); putCached(keySpace, authorizableColumnFamily, authorizableId, encodedProperties, true); return true; } + + private void checkId(String authorizableId) throws StorageClientException { + if ( authorizableId.charAt(0) == '_') { + throw new StorageClientException("Authorizables may not start with _ :"+authorizableId); + } + for ( int i = 0; i < authorizableId.length(); i++) { + int cp = authorizableId.codePointAt(i); + if ( Character.isWhitespace(cp) || + Character.isISOControl(cp) || + Character.isMirrored(cp) ) { + throw new StorageClientException("Authorizables may not contain :"+authorizableId.charAt(i)); + } + } + } + public boolean createUser(String authorizableId, String authorizableName, String password, Map properties) throws AccessDeniedException, StorageClientException { if (properties == null) { @@ -308,12 +407,39 @@ public boolean createGroup(String authorizableId, String authorizableName, public void delete(String authorizableId) throws AccessDeniedException, StorageClientException { checkOpen(); accessControlManager.check(Security.ZONE_ADMIN, authorizableId, Permissions.CAN_DELETE); - removeFromCache(keySpace, authorizableColumnFamily, authorizableId); - client.remove(keySpace, authorizableColumnFamily, authorizableId); - storeListener.onDelete(Security.ZONE_AUTHORIZABLES, authorizableId, accessControlManager.getCurrentUserId()); + Authorizable authorizable = findAuthorizable(authorizableId); + if (authorizable != null){ + removeCached(keySpace, authorizableColumnFamily, authorizableId); + storeListener.onDelete(Security.ZONE_AUTHORIZABLES, authorizableId, accessControlManager.getCurrentUserId(), getType(authorizable), authorizable.getOriginalProperties()); 
+ } + } + + private String getType(Authorizable authorizable) { + if ( authorizable != null ) { + if ( authorizable.hasProperty(Authorizable.AUTHORIZABLE_TYPE_FIELD)) { + return (String) authorizable.getProperty(Authorizable.AUTHORIZABLE_TYPE_FIELD); + } else if ( authorizable instanceof Group) { + return Authorizable.GROUP_VALUE; + } else if ( authorizable instanceof User) { + // this was an object. + return String.valueOf(Authorizable.USER_VALUE); + } + + } + return null; + } + private String getType(Map props) { + if ( props != null ) { + if ( props.containsKey(Authorizable.AUTHORIZABLE_TYPE_FIELD)) { + return (String) props.get(Authorizable.AUTHORIZABLE_TYPE_FIELD); + } + } + return null; } + + public void close() { closed = true; } @@ -333,19 +459,21 @@ public void changePassword(Authorizable authorizable, String password, String ol if (!thisUser.isAdmin()) { User u = authenticator.authenticate(id, oldPassword); if (u == null) { - throw new StorageClientException( + throw new IllegalArgumentException( "Unable to change passwords, old password does not match"); } } putCached(keySpace, authorizableColumnFamily, id, ImmutableMap.of( - Authorizable.LASTMODIFIED, + Authorizable.LASTMODIFIED_FIELD, (Object)System.currentTimeMillis(), - Authorizable.LASTMODIFIED_BY, + Authorizable.ID_FIELD, + id, + Authorizable.LASTMODIFIED_BY_FIELD, accessControlManager.getCurrentUserId(), Authorizable.PASSWORD_FIELD, StorageClientUtils.secureHash(password)), false); - storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, id, currentUserId, false, "op:change-password"); + storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, id, currentUserId, getType(authorizable), false, null, "op:change-password"); } else { throw new AccessDeniedException(Security.ZONE_ADMIN, id, @@ -355,7 +483,7 @@ public void changePassword(Authorizable authorizable, String password, String ol } - public Iterator findAuthorizable(String propertyName, String value, + public DisposableIterator findAuthorizable(String 
propertyName, String value, Class authorizableType) throws StorageClientException { Builder builder = ImmutableMap.builder(); if (value != null) { @@ -366,13 +494,14 @@ public Iterator findAuthorizable(String propertyName, String value } else if (authorizableType.equals(Group.class)) { builder.put(Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.GROUP_VALUE); } - final Iterator> authMaps = client.find(keySpace, - authorizableColumnFamily, builder.build()); + final DisposableIterator> authMaps = client.find(keySpace, + authorizableColumnFamily, builder.build(), this); return new PreemptiveIterator() { private Authorizable authorizable; + @Override protected boolean internalHasNext() { while (authMaps.hasNext()) { Map authMap = authMaps.next(); @@ -387,15 +516,17 @@ protected boolean internalHasNext() { .check(Security.ZONE_AUTHORIZABLES, (String) authMap.get(Authorizable.ID_FIELD), Permissions.CAN_READ); if (isAUser(authMap)) { - authorizable = new UserInternal(authMap, false); + authorizable = new UserInternal(authMap, session, false); + return true; + } else if (isAGroup(authMap)) { + authorizable = new GroupInternal(authMap, session, false); return true; - } else if (isAGroup(authMap)) - authorizable = new GroupInternal(authMap, false); - return true; + } } catch (AccessDeniedException e) { LOGGER.debug("Search result filtered ", e.getMessage()); } catch (StorageClientException e) { LOGGER.error("Failed to check ACLs ", e.getMessage()); + close(); return false; } @@ -403,13 +534,19 @@ protected boolean internalHasNext() { } authorizable = null; + close(); return false; } + @Override protected Authorizable internalNext() { return authorizable; } - + @Override + public void close() { + authMaps.close(); + super.close(); + } }; } @@ -444,5 +581,55 @@ protected Logger getLogger() { return LOGGER; } + public void disablePassword(Authorizable authorizable) throws StorageClientException, + AccessDeniedException { + String id = authorizable.getId(); + + if 
(thisUser.isAdmin()) { + putCached(keySpace, authorizableColumnFamily, id, ImmutableMap.of( + Authorizable.LASTMODIFIED_FIELD, + (Object)System.currentTimeMillis(), + Authorizable.ID_FIELD, + id, + Authorizable.LASTMODIFIED_BY_FIELD, + accessControlManager.getCurrentUserId(), + Authorizable.PASSWORD_FIELD, + DISABLED_PASSWORD_HASH), false); + + storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, id, currentUserId, getType(authorizable), false, null, "op:disable-password"); + + } else { + throw new AccessDeniedException(Security.ZONE_ADMIN, id, + "Not allowed to disable the password, must be an admin user", + currentUserId); + } + } + + + public void triggerRefresh(String id) throws StorageClientException, AccessDeniedException { + Authorizable c = findAuthorizable(id); + if ( c != null ) { + String type = getType(c); + storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, id, + accessControlManager.getCurrentUserId(), type, false, null, + new String[] { type }); + } + } + + public void triggerRefreshAll() throws StorageClientException { + if (User.ADMIN_USER.equals(accessControlManager.getCurrentUserId()) ) { + DisposableIterator all = client.listAll(keySpace, authorizableColumnFamily); + try { + while(all.hasNext()) { + Map c = all.next().getProperties(); + if ( c.containsKey(Authorizable.ID_FIELD) ) { + storeListener.onUpdate(Security.ZONE_AUTHORIZABLES, (String)c.get(Authorizable.ID_FIELD), User.ADMIN_USER, getType(c), false, null, (String[]) null); + } + } + } finally { + all.close(); // not necessary if the wile completes, but if there is an error it might be. 
+ } + } + } } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/GroupInternal.java b/core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/GroupInternal.java new file mode 100644 index 00000000..8b69eaea --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/GroupInternal.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.authorizable; + +import com.google.common.collect.ImmutableMap; + +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.Group; + +import java.util.Map; + +public class GroupInternal extends Group { + + public GroupInternal(Map groupMap, Session session, boolean objectIsNew) + throws StorageClientException, AccessDeniedException { + super(groupMap, session); + setObjectNew(objectIsNew); + } + + public GroupInternal(ImmutableMap groupMap, Session session, + boolean objectIsNew, boolean readOnly) throws StorageClientException, + AccessDeniedException { + super(groupMap, session); + setObjectNew(objectIsNew); + setReadOnly(readOnly); + } + + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/UserInternal.java b/core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/UserInternal.java new file mode 100644 index 00000000..95dbb963 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/authorizable/UserInternal.java @@ -0,0 +1,35 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.authorizable; + +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.User; + +import java.util.Map; + +public class UserInternal extends User { + + public UserInternal(Map groupMap, Session session, boolean objectIsNew) + throws StorageClientException, AccessDeniedException { + super(groupMap, session); + setObjectNew(objectIsNew); + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/content/AccessControlManagerTokenWrapper.java b/core/src/main/java/org/sakaiproject/nakamura/lite/content/AccessControlManagerTokenWrapper.java new file mode 100644 index 00000000..daf0e8b3 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/content/AccessControlManagerTokenWrapper.java @@ -0,0 +1,145 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.content; + + +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessControlManager; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permission; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalTokenResolver; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.lite.accesscontrol.PropertyAcl; + +import java.util.Map; + +public class AccessControlManagerTokenWrapper implements AccessControlManager { + + private AccessControlManager delegate; + private PrincipalTokenResolver principalTokenResovler; + + public AccessControlManagerTokenWrapper(AccessControlManager accessControlManager, + PrincipalTokenResolver principalResolver) { + this.delegate = accessControlManager; + this.principalTokenResovler = principalResolver; + } + + public Map getAcl(String objectType, String objectPath) + throws StorageClientException, AccessDeniedException { + try { + delegate.setRequestPrincipalResolver(principalTokenResovler); + return delegate.getAcl(objectType, objectPath); + } finally { + delegate.clearRequestPrincipalResolver(); + } + } + + public Map getEffectiveAcl(String objectType, String objectPath) + throws StorageClientException, AccessDeniedException { + try { + delegate.setRequestPrincipalResolver(principalTokenResovler); + return delegate.getEffectiveAcl(objectType, objectPath); + } finally { + delegate.clearRequestPrincipalResolver(); + } + } + + public void setAcl(String objectType, String objectPath, AclModification[] aclModifications) + throws StorageClientException, AccessDeniedException { + try { + delegate.setRequestPrincipalResolver(principalTokenResovler); + 
delegate.setAcl(objectType, objectPath, aclModifications); + } finally { + delegate.clearRequestPrincipalResolver(); + } + } + + public void check(String objectType, String objectPath, Permission permission) + throws AccessDeniedException, StorageClientException { + try { + delegate.setRequestPrincipalResolver(principalTokenResovler); + delegate.check(objectType, objectPath, permission); + } finally { + delegate.clearRequestPrincipalResolver(); + } + } + + public String getCurrentUserId() { + return delegate.getCurrentUserId(); + } + + public boolean can(Authorizable authorizable, String objectType, String objectPath, + Permission permission) { + throw new UnsupportedOperationException(); + } + + public Permission[] getPermissions(String objectType, String objectPath) + throws StorageClientException { + try { + delegate.setRequestPrincipalResolver(principalTokenResovler); + return delegate.getPermissions(objectType, objectPath); + } finally { + delegate.clearRequestPrincipalResolver(); + } + } + + public String[] findPrincipals(String objectType, String objectPath, int permission, + boolean granted) throws StorageClientException { + try { + delegate.setRequestPrincipalResolver(principalTokenResovler); + return delegate.findPrincipals(objectType, objectPath, permission, granted); + } finally { + delegate.clearRequestPrincipalResolver(); + } + } + + public void setRequestPrincipalResolver(PrincipalTokenResolver tokenPrincipalResolver) { + if ( principalTokenResovler instanceof ChainingPrincipalTokenResolver) { + ((ChainingPrincipalTokenResolver)tokenPrincipalResolver).setNextTokenResovler(tokenPrincipalResolver); + } + } + + public void clearRequestPrincipalResolver() { + if ( principalTokenResovler instanceof ChainingPrincipalTokenResolver) { + ((ChainingPrincipalTokenResolver)principalTokenResovler).clearNextTokenResolver(); + } + } + + public void signContentToken(Content token, String objectType, String objectPath) throws StorageClientException, + 
AccessDeniedException { + try { + delegate.setRequestPrincipalResolver(principalTokenResovler); + delegate.signContentToken(token, objectType, objectPath); + } finally { + delegate.clearRequestPrincipalResolver(); + } + } + + public PropertyAcl getPropertyAcl(String objectType, String objectPath) + throws AccessDeniedException, StorageClientException { + try { + delegate.setRequestPrincipalResolver(principalTokenResovler); + return delegate.getPropertyAcl(objectType, objectPath); + } finally { + delegate.clearRequestPrincipalResolver(); + } + } + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentInputStream.java b/core/src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentInputStream.java similarity index 97% rename from src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentInputStream.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentInputStream.java index 513f11da..87a23d9f 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentInputStream.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentInputStream.java @@ -19,7 +19,8 @@ import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.lite.storage.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockSetContentHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/content/ChainingPrincipalTokenResolver.java b/core/src/main/java/org/sakaiproject/nakamura/lite/content/ChainingPrincipalTokenResolver.java new file mode 100644 index 00000000..4e44d274 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/content/ChainingPrincipalTokenResolver.java @@ -0,0 +1,28 @@ +/* + * Licensed to the 
Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.content; + +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalTokenResolver; + +public interface ChainingPrincipalTokenResolver { + + void setNextTokenResovler(PrincipalTokenResolver nextTokenResolver); + + void clearNextTokenResolver(); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/content/ContentManagerImpl.java b/core/src/main/java/org/sakaiproject/nakamura/lite/content/ContentManagerImpl.java new file mode 100644 index 00000000..a8a06907 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/content/ContentManagerImpl.java @@ -0,0 +1,1310 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.content; + +import static org.sakaiproject.nakamura.lite.content.InternalContent.BLOCKID_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.BODY_CREATED_BY_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.BODY_CREATED_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.BODY_LAST_MODIFIED_BY_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.BODY_LAST_MODIFIED_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.COPIED_DEEP_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.COPIED_FROM_ID_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.COPIED_FROM_PATH_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.CREATED_BY_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.CREATED_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.DELETED_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.LASTMODIFIED_BY_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.LASTMODIFIED_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.LENGTH_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.LINKED_PATH_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.NEXT_VERSION_FIELD; +import static 
org.sakaiproject.nakamura.lite.content.InternalContent.PATH_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.PREVIOUS_BLOCKID_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.PREVIOUS_VERSION_UUID_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.READONLY_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.STRUCTURE_UUID_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.TRUE; +import static org.sakaiproject.nakamura.lite.content.InternalContent.UUID_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.VERSION_HISTORY_ID_FIELD; +import static org.sakaiproject.nakamura.lite.content.InternalContent.VERSION_NUMBER_FIELD; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import com.google.common.collect.Ordering; +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.StorageConstants; +import org.sakaiproject.nakamura.api.lite.StoreListener; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessControlManager; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalTokenResolver; +import 
org.sakaiproject.nakamura.api.lite.accesscontrol.Security; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification.Operation; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.content.ActionRecord; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.CachingManagerImpl; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.SparseRow; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +/** + *
+ * Content Manager.
+ * Manages two types of content,
+ * Bundles of content properties and bodies.
+ * Bodies are chunked into sizes to aide efficiency when retrieving the content.
+ * 
+ * CF content stores the structure of the content keyed by path.
+ * Each item contains child names in columns + the guid of the item
+ * eg
+ *   path : {
+ *       ':id' : thisitemUUID,
+ *       subitemA : subitemAUUID,
+ *       subitemB : subitemBUUID
+ *   }
+ * the guid of the item points to the CF content version where items are keyed by the version.
+ * These items also contain child nodes under children as an array
+ * 
+ * eg
+ *    itemUUID : {
+ *         'id' : thisitemUUID
+ *         'children' : [ 
+ *           subitemA : subitemAUUID,
+ *           subitemB : subitemBUUID
+ *         ],
+ *         'nblocks' = numberOfBlocksSetsOfContent
+ *         'length' = totalLenghtOftheContent
+ *         'blocksize' = storageBlockSize
+ *         'blockid' = blockID
+ *         ... other properties ...
+ *    }
+ *    
+ * The content blocks are stored in CF content body
+ * eg
+ *   blockID:blockSetNumber : {
+ *         'id' : blockID,
+ *         'numblocks' : numberOfBlocksInThisSet,
+ *         'blocklength0' : lengthOfThisBlock,
+ *         'body0' : byte[]
+ *         'blocklength1' : lengthOfThisBlock,
+ *         'body1' : byte[]
+ *         ...
+ *         'blocklengthn' : lengthOfThisBlock,
+ *         'bodyn' : byte[]
+ *    }
+ * 
+ * 
+ * Versioning:
+ * 
+ * When a version is saved, the CF contentVersion item is cloned and the CF content :id and any subitems IDs are updated.
+ * Block 0 is marked as readonly
+ * 
+ * When the body is written to its CF content row is checked to see if the block is read only. If so a new block is created with and linked in with 'previousversion'
+ * A version object is also created to keep track of the versions.
+ * 
+ * 
+ * + * @author ieb + * + */ +public class ContentManagerImpl extends CachingManagerImpl implements ContentManager { + + private static final Logger LOGGER = LoggerFactory.getLogger(ContentManagerImpl.class); + + + private static final Set PROTECTED_FIELDS = ImmutableSet.of(LASTMODIFIED_FIELD, + LASTMODIFIED_BY_FIELD, + UUID_FIELD, + PATH_FIELD); + + // These properties copied from AccessControlManager to keep from binding + // directly to the implementation class. They should stay in sync. + private static final String _SECRET_KEY = "_secretKey"; + private static final String _PATH = "_aclPath"; + private static final String _OBJECT_TYPE = "_aclType"; + private static final String _KEY = "_aclKey"; + private static final Set ACL_READ_ONLY_PROPERTIES = ImmutableSet.of(_SECRET_KEY, _PATH, _OBJECT_TYPE, _KEY); + + /** + * Storage Client + */ + private StorageClient client; + /** + * The access control manager in use. + */ + private AccessControlManager accessControlManager; + /** + * Key space for this content. + */ + private String keySpace; + /** + * Column Family for this content. 
+ */ + private String contentColumnFamily; + + private boolean closed; + + private StoreListener eventListener; + + + private PathPrincipalTokenResolver pathPrincipalResolver; + + public ContentManagerImpl(StorageClient client, AccessControlManager accessControlManager, + Configuration config, Map sharedCache, StoreListener eventListener) { + super(client, sharedCache); + this.client = client; + keySpace = config.getKeySpace(); + contentColumnFamily = config.getContentColumnFamily(); + closed = false; + this.eventListener = eventListener; + String userId = accessControlManager.getCurrentUserId(); + String usersTokenPath = StorageClientUtils.newPath(userId, "private/tokens"); + this.pathPrincipalResolver = new PathPrincipalTokenResolver(usersTokenPath, this); + this.accessControlManager = new AccessControlManagerTokenWrapper(accessControlManager, pathPrincipalResolver); + } + + + public boolean exists(String path) { + try { + checkOpen(); + accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); + Map structure = getCached(keySpace, contentColumnFamily, path); + if (exists(structure)) { + String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); + Map content = getCached(keySpace, contentColumnFamily, contentId); + if (exists(content)) { + return true; + } + } + } catch (AccessDeniedException e) { + LOGGER.debug(e.getMessage(), e); + } catch (StorageClientException e) { + LOGGER.debug(e.getMessage(), e); + } + return false; + } + + public Content get(String path) throws StorageClientException, AccessDeniedException { + checkOpen(); + accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); + Map structure = getCached(keySpace, contentColumnFamily, path); + if (exists(structure)) { + String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); + Map content = getCached(keySpace, contentColumnFamily, contentId); + if (exists(content)) { + Content contentObject = new Content(path, content); + ((InternalContent) 
contentObject).internalize(this, false); + return contentObject; + } + } + return null; + + } + + + public Iterator listChildren(String path) throws StorageClientException { + final DisposableIterator> childContent = client.listChildren(keySpace, + contentColumnFamily, path, this); + return new PreemptiveIterator() { + + private Content content; + + @Override + protected boolean internalHasNext() { + content = null; + while(content == null && childContent.hasNext()) { + try { + Map structureMap = childContent.next(); + LOGGER.debug("Loaded Next as {} ", structureMap); + if (exists(structureMap)) { + String path = (String) structureMap.get(PATH_FIELD); + content = get(path); + } + } catch (AccessDeniedException e) { + LOGGER.debug(e.getMessage(),e); + } catch (StorageClientException e) { + LOGGER.debug(e.getMessage(),e); + } + } + if (content == null) { + // this is over the top as a disposable iterator should close auto + childContent.close(); + super.close(); + return false; + } + return true; + } + + @Override + protected Content internalNext() { + return content; + } + }; + } + + public Iterator listChildPaths(final String path) throws StorageClientException { + final Iterator> childContent = client.listChildren(keySpace, + contentColumnFamily, path, this); + return new PreemptiveIterator() { + + private String childPath; + + @Override + protected boolean internalHasNext() { + while(childContent.hasNext()) { + try { + Map structureMap = childContent.next(); + LOGGER.debug("Loaded Next child of {} as {} ", path, structureMap); + if (exists(structureMap)) { + String childContentId = (String) structureMap.get(STRUCTURE_UUID_FIELD); + Map childContent = getCached(keySpace, contentColumnFamily, childContentId); + if (exists(childContent)) { + String testChildPath = (String) structureMap.get(PATH_FIELD); + accessControlManager.check(Security.ZONE_CONTENT, testChildPath, Permissions.CAN_READ); + childPath = testChildPath; + // this is not that efficient since it 
requires the map is + // loaded, at the moment I don't have a way round this with the + // underlying index structure. + LOGGER.debug("Got Next Child of {} as {} ", path, childPath); + return true; + } + } + } catch (AccessDeniedException e) { + LOGGER.debug(e.getMessage(),e); + } catch (StorageClientException e) { + LOGGER.debug(e.getMessage(),e); + } + } + LOGGER.debug("No more"); + childPath = null; + super.close(); + return false; + } + + @Override + protected String internalNext() { + return childPath; + } + }; + } + + public void triggerRefresh(String path) throws StorageClientException, AccessDeniedException { + Content c = get(path); + if ( c != null ) { + eventListener.onUpdate(Security.ZONE_CONTENT, path, accessControlManager.getCurrentUserId(), getResourceType(c), false, null, "op:update"); + } + } + + private String getResourceType(InternalContent c) { + String resourceType = null; + if ( c != null ) { + if ( c.hasProperty(Content.SLING_RESOURCE_TYPE_FIELD)) { + resourceType = (String) c.getProperty(Content.SLING_RESOURCE_TYPE_FIELD); + } else if ( c.hasProperty(Content.RESOURCE_TYPE_FIELD)) { + resourceType = (String) c.getProperty(Content.RESOURCE_TYPE_FIELD); + } else if ( c.hasProperty(Content.MIMETYPE_FIELD)) { + resourceType = (String) c.getProperty(Content.MIMETYPE_FIELD); + } + } + return resourceType; + } + private String getResourceType(Map c) { + String resourceType = null; + if ( c != null ) { + if ( c.containsKey(Content.SLING_RESOURCE_TYPE_FIELD)) { + resourceType = (String) c.get(Content.SLING_RESOURCE_TYPE_FIELD); + } else if ( c.containsKey(Content.RESOURCE_TYPE_FIELD)) { + resourceType = (String) c.get(Content.RESOURCE_TYPE_FIELD); + } else if ( c.containsKey(Content.MIMETYPE_FIELD)) { + resourceType = (String) c.get(Content.MIMETYPE_FIELD); + } + } + return resourceType; + } + + + public void triggerRefreshAll() throws StorageClientException { + if (User.ADMIN_USER.equals(accessControlManager.getCurrentUserId()) ) { + 
DisposableIterator all = client.listAll(keySpace, contentColumnFamily); + try { + while(all.hasNext()) { + Map c = all.next().getProperties(); + if ( c.containsKey(PATH_FIELD) && !c.containsKey(STRUCTURE_UUID_FIELD)) { + + eventListener.onUpdate(Security.ZONE_CONTENT, (String)c.get(PATH_FIELD), User.ADMIN_USER, getResourceType(c), false, null, "op:update"); + } + } + } finally { + all.close(); + } + } + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.lite.content.ContentManager#replace(org.sakaiproject.nakamura.api.lite.content.Content) + */ + // TODO unit test + public void replace(Content content) throws AccessDeniedException, + StorageClientException { + replace(content, true); + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.lite.content.ContentManager#replace(org.sakaiproject.nakamura.api.lite.content.Content, boolean) + */ + // TODO unit test + public void replace(Content content, boolean withTouch) + throws AccessDeniedException, StorageClientException { + Content current = get(content.getPath()); + if (current != null) { + Set diffKeys = diffKeys(current.getProperties(), content.getProperties()); + for (String diffKey : diffKeys) { + content.setProperty(diffKey, new RemoveProperty()); + } + } + update(content, withTouch); + } + + /** + * Set the keys in update to new RemoveProperty() if they are + * in current but not in update. System properties are ignored + * which is the only difference to {@link StorageClientUtils#diffKeys(Map, Map)}. + * + * @param current + * The current content found at the location. + * @param update + * The content that will be used to update the location. + * @return Set of keys to remove from update. 
+ */ + private Set diffKeys(Map current, Map update) { + Set diffKeys = StorageClientUtils.diffKeys(current, update); + if (diffKeys.size() > 0) { + // remove system properties + Iterator keysIter = diffKeys.iterator(); + while (keysIter.hasNext()) { + String diffKey = keysIter.next(); + if (diffKey.startsWith(Repository.SYSTEM_PROP_PREFIX)) { + keysIter.remove(); + } + } + } + return diffKeys; + } + + public void update(Content content) throws AccessDeniedException, StorageClientException { + update(content, Boolean.TRUE); + } + + public void update(Content excontent, boolean withTouch) throws AccessDeniedException, StorageClientException { + checkOpen(); + InternalContent content = (InternalContent) excontent; + String path = content.getPath(); + accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_WRITE); + String id = null; + Map toSave = null; + // deal with content that already exists, but has been marked as new by merging in the new content. + // content that is deleted wont appear in this layer + if (content.isNew()) { + Content existingContent = get(path); + if ( existingContent != null ) { + Map properties = content.getProperties(); + for ( Entry e : properties.entrySet()) { + existingContent.setProperty(e.getKey(), e.getValue()); + } + content = existingContent; + } + } + + Map originalProperties = ImmutableMap.of(); + boolean touch = withTouch || !User.ADMIN_USER.equals(accessControlManager.getCurrentUserId()); + + if (content.isNew()) { + + // create the parents if necessary + if (!StorageClientUtils.isRoot(path)) { + String parentPath = StorageClientUtils.getParentObjectPath(path); + Content parentContent = get(parentPath); + if (parentContent == null) { + update(new Content(parentPath, null), withTouch); + } + } + toSave = Maps.newHashMap(content.getPropertiesForUpdate()); + id = StorageClientUtils.getInternalUuid(); + // if the user is admin we allow overwriting of protected fields. This should allow content migration. 
+ toSave.put(UUID_FIELD, id); + toSave.put(PATH_FIELD, path); + toSave.put(CREATED_FIELD, + touch ? System.currentTimeMillis() : content.getProperty(CREATED_FIELD)); + toSave.put(CREATED_BY_FIELD, + touch ? accessControlManager.getCurrentUserId() : content.getProperty(CREATED_BY_FIELD)); + toSave.put(LASTMODIFIED_FIELD, + touch ? System.currentTimeMillis() : content.getProperty(LASTMODIFIED_FIELD)); + toSave.put(LASTMODIFIED_BY_FIELD, + touch? accessControlManager.getCurrentUserId() : content.getProperty(LASTMODIFIED_BY_FIELD)); + toSave.put(DELETED_FIELD, new RemoveProperty()); // make certain the deleted field is not set + LOGGER.debug("New Content with {} {} ", id, toSave); + } else if (content.isUpdated()) { + originalProperties = content.getOriginalProperties(); + toSave = Maps.newHashMap(content.getPropertiesForUpdate()); + + + // only admin can bypass the lastModified fields using withTouch=false + if (touch) { + for (String field : PROTECTED_FIELDS) { + LOGGER.debug("Resetting value for {} to {}", field, originalProperties.get(field)); + toSave.put(field, originalProperties.get(field)); + } + toSave.put(LASTMODIFIED_FIELD, System.currentTimeMillis()); + toSave.put(LASTMODIFIED_BY_FIELD, + accessControlManager.getCurrentUserId()); + toSave.put(DELETED_FIELD, new RemoveProperty()); // make certain the deleted field is not set + } else { + toSave.put(UUID_FIELD, originalProperties.get(UUID_FIELD)); + } + id = (String)toSave.get(UUID_FIELD); + LOGGER.debug("Updating Content with {} {} ", id, toSave); + } else { + // if not new or updated, don't update. 
+ return; + } + + Map checkContent = getCached(keySpace, contentColumnFamily, id); + if (exists(checkContent) && TRUE.equals((String)checkContent.get(READONLY_FIELD))) { + throw new AccessDeniedException(Security.ZONE_CONTENT, path, + "update on read only Content Item (possibly a previous version of the item)", + accessControlManager.getCurrentUserId()); + } + boolean isnew = false; + if (content.isNew()) { + isnew = true; + putCached(keySpace, contentColumnFamily, path, + ImmutableMap.of(STRUCTURE_UUID_FIELD, (Object)id, PATH_FIELD, path, DELETED_FIELD, new RemoveProperty()), true); + } else { + // get the structure field to see if we need to update that + Map structure = getCached(keySpace, contentColumnFamily, path); + if (!exists(structure)) { + // rewrite the structure field resetting the deleted field. + isnew = true; + putCached(keySpace, contentColumnFamily, path, + ImmutableMap.of(STRUCTURE_UUID_FIELD, (Object)id, PATH_FIELD, path, DELETED_FIELD, new RemoveProperty()), true); + } else { + String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); + Map updateContent = getCached(keySpace, contentColumnFamily, contentId); + if (updateContent == null || updateContent.size() == 0 || TRUE.equals(updateContent.get(DELETED_FIELD))) { + // rewrite the structure field resetting the deleted field. + isnew = true; + putCached(keySpace, contentColumnFamily, path, + ImmutableMap.of(STRUCTURE_UUID_FIELD, (Object)id, PATH_FIELD, path, DELETED_FIELD, new RemoveProperty()), true); + } + } + } + // save the content id. + putCached(keySpace, contentColumnFamily, id, toSave, isnew); + LOGGER.debug("Saved {} at {} as {} ", new Object[] { path, id, toSave }); + // reset state to unmodified to take further modifications. 
+ content.reset(getCached(keySpace, contentColumnFamily, id)); + + eventListener.onUpdate(Security.ZONE_CONTENT, path, accessControlManager.getCurrentUserId(), getResourceType(content), isnew, originalProperties, "op:update"); + } + + public void delete(String path) throws AccessDeniedException, StorageClientException { + delete(path, false); + } + + public void delete(String path, boolean recurse) throws AccessDeniedException, StorageClientException { + checkOpen(); + accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_DELETE); + Iterator children = listChildPaths(path); + if (!recurse && children.hasNext()) { + throw new StorageClientException("Unable to delete a path with active children [" + + path + "]. Set recurse=true to delete a tree."); + } + + while (children.hasNext()) { + String child = children.next(); + delete(child, true); + } + + Map structure = getCached(keySpace, contentColumnFamily, path); + if (exists(structure)) { + String uuid = (String)structure.get(STRUCTURE_UUID_FIELD); + Map content = getCached(keySpace, contentColumnFamily, uuid); + if (exists(content)) { + Map contentBeforeDelete = ImmutableMap.copyOf(content); + String resourceType = (String) content.get("sling:resourceType"); + putCached(keySpace, contentColumnFamily, uuid, + ImmutableMap.of(DELETED_FIELD, (Object) TRUE), false); + eventListener.onDelete(Security.ZONE_CONTENT, path, accessControlManager.getCurrentUserId(), resourceType, contentBeforeDelete); + } + // at 1.4 the deleted field was not on the structure object, this will auto migrate content. 
+ putCached(keySpace, contentColumnFamily, path, + ImmutableMap.of(DELETED_FIELD, (Object) TRUE), false); + } + } + + private boolean exists(Map map) { + return map != null && map.size() > 0 && !TRUE.equals(map.get(DELETED_FIELD)); + } + + public long writeBody(String path, InputStream in) throws StorageClientException, + AccessDeniedException, IOException { + return writeBody(path, in, null); + } + + public long writeBody(String path, InputStream in, String streamId) + throws StorageClientException, AccessDeniedException, IOException { + checkOpen(); + accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_WRITE); + Map structure = getCached(keySpace, contentColumnFamily, path); + if ( !exists(structure) ) { + Content contentObj = new Content(path,null); + update(contentObj); + structure = getCached(keySpace, contentColumnFamily, path); + } + String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); + Map content = getCached(keySpace, contentColumnFamily, contentId); + if ( !exists(content)) { + Content contentObj = new Content(path,null); + update(contentObj); + structure = getCached(keySpace, contentColumnFamily, path); + contentId = (String)structure.get(STRUCTURE_UUID_FIELD); + content = getCached(keySpace, contentColumnFamily, contentId); + } + boolean isnew = true; + String blockIdField = StorageClientUtils.getAltField(BLOCKID_FIELD, streamId); + if (content.containsKey(blockIdField)) { + isnew = false; + } + String contentBlockId = StorageClientUtils.getInternalUuid(); + + Map metadata = client.streamBodyIn(keySpace, contentColumnFamily, + contentId, contentBlockId, streamId, content, in); + metadata.put(StorageClientUtils.getAltField(BODY_LAST_MODIFIED_FIELD, streamId), + System.currentTimeMillis()); + metadata.put(StorageClientUtils.getAltField(BODY_LAST_MODIFIED_BY_FIELD, streamId), + accessControlManager.getCurrentUserId()); + if (isnew) { + metadata.put(StorageClientUtils.getAltField(BODY_CREATED_FIELD, streamId), + 
System.currentTimeMillis()); + metadata.put(StorageClientUtils.getAltField(BODY_CREATED_BY_FIELD, streamId), + accessControlManager.getCurrentUserId()); + } + putCached(keySpace, contentColumnFamily, contentId, metadata, isnew); + long length = 0; + String lengthFieldName = StorageClientUtils.getAltField(LENGTH_FIELD, streamId); + if (metadata.containsKey(lengthFieldName)) { + length = (Long) metadata.get(lengthFieldName); + } + eventListener.onUpdate(Security.ZONE_CONTENT, path, accessControlManager.getCurrentUserId(), getResourceType(content), false, null, "stream", streamId); + return length; + + } + + public InputStream getInputStream(String path) throws StorageClientException, + AccessDeniedException, IOException { + return getInputStream(path, null); + } + + public InputStream getInputStream(String path, String streamId) throws StorageClientException, + AccessDeniedException, IOException { + checkOpen(); + accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); + Map structure = getCached(keySpace, contentColumnFamily, path); + if ( !exists(structure) ) { + return null; + } + LOGGER.debug("Structure Loaded {} {} ", path, structure); + String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); + return internalGetInputStream(contentId, streamId); + } + + private InputStream internalGetInputStream(String contentId, String streamId) + throws StorageClientException, AccessDeniedException, IOException { + Map content = getCached(keySpace, contentColumnFamily, contentId); + if ( !exists(content) ) { + return null; + } + String contentBlockId = (String)content.get(StorageClientUtils + .getAltField(BLOCKID_FIELD, streamId)); + return client.streamBodyOut(keySpace, contentColumnFamily, contentId, contentBlockId, streamId, + content); + } + + public void close() { + closed = true; + } + + private void checkOpen() throws StorageClientException { + if (closed) { + throw new StorageClientException("Content Manager is closed"); + } + } + + // 
// TODO: Unit test
// NOTE(review): generic type parameters appear to have been stripped from this copy of the
// file (e.g. "Set streams", "Map copyProperties", "DisposableIterator>"). Tokens are kept
// exactly as found; restore the generics against the upstream file before compiling.
/**
 * {@inheritDoc}
 *
 * Copies the content at {@code from} to {@code to}, deleting any existing destination
 * first. Protected fields (id, path) are never copied; provenance fields (copiedFrom,
 * copiedFromId, copiedDeep) are stamped on the destination. When {@code withStreams} is
 * true the body streams are copied as well; otherwise only properties are copied.
 *
 * @see org.sakaiproject.nakamura.api.lite.content.ContentManager#copy(java.lang.String, java.lang.String, boolean)
 */
public void copy(String from, String to, boolean withStreams) throws StorageClientException,
        AccessDeniedException, IOException {
    checkOpen();
    // To Copy, get the to object out and copy everything over.
    Content f = get(from);
    if (f == null) {
        throw new StorageClientException(" Source content " + from + " does not exist");
    }
    if ( f.getProperty(UUID_FIELD) == null ) {
        LOGGER.warn("Bad Content item with no ID cant be copied {} ",f);
        throw new StorageClientException(" Source content " + from + " Has no "+UUID_FIELD);
    }
    Content t = get(to);
    if (t != null) {
        // destination exists: remove it so the copy fully replaces it
        LOGGER.debug("Deleting {} ",to);
        delete(to);
    }
    Set streams = Sets.newHashSet();
    Map copyProperties = Maps.newHashMap();
    if (withStreams) {
        for (Entry p : f.getProperties().entrySet()) {
            // Protected fields (such as ID and path) will differ between
            // the source and destination, so don't copy them.
            if (!PROTECTED_FIELDS.contains(p.getKey())) {
                if (p.getKey().startsWith(BLOCKID_FIELD)) {
                    // remember block-id properties so the corresponding bodies
                    // can be copied below
                    streams.add(p.getKey());
                } else {
                    copyProperties.put(p.getKey(), p.getValue());
                }
            }
        }
    } else {
        // NOTE(review): the shallow-copy branch copies ALL properties, including the
        // protected fields the deep branch excludes — confirm this asymmetry is intended.
        copyProperties.putAll(f.getProperties());
    }
    // stamp provenance on the destination
    copyProperties.put(COPIED_FROM_PATH_FIELD, from);
    copyProperties.put(COPIED_FROM_ID_FIELD, f.getProperty(UUID_FIELD));
    copyProperties.put(COPIED_DEEP_FIELD, withStreams);
    t = new Content(to, copyProperties);
    update(t);
    LOGGER.debug("Copy Updated {} {} ",to,t);

    // copy each remembered body stream; the stream id is the suffix after
    // "<BLOCKID_FIELD>/" (null means the default stream)
    for (String stream : streams) {
        String streamId = null;
        if (stream.length() > BLOCKID_FIELD.length()) {
            streamId = stream.substring(BLOCKID_FIELD.length() + 1);
        }
        InputStream fromStream = getInputStream(from, streamId);
        writeBody(to, fromStream);
        fromStream.close();
    }
    eventListener.onUpdate(Security.ZONE_CONTENT, to, accessControlManager.getCurrentUserId(), getResourceType(f), true, null, "op:copy");

}

/**
 * {@inheritDoc}
 *
 * Convenience overload: move without force, keeping destination history.
 *
 * @see org.sakaiproject.nakamura.api.lite.content.ContentManager#move(java.lang.String,
 *      java.lang.String)
 */
public List move(String from, String to) throws AccessDeniedException,
        StorageClientException {
    return move(from, to, false, true);
}

/**
 * {@inheritDoc}
 *
 * Convenience overload: move, keeping destination history.
 *
 * @see org.sakaiproject.nakamura.api.lite.content.ContentManager#move(java.lang.String,
 *      java.lang.String, boolean)
 */
public List move(String from, String to, boolean force)
        throws AccessDeniedException, StorageClientException {
    return move(from, to, force, true);
}

/**
 * {@inheritDoc}
 *
 * Recursively moves the tree rooted at {@code from} to {@code to}. When
 * {@code keepDestinationHistory} is true, destination children with no counterpart under
 * the source are deleted first, and the destination's version history is preserved.
 * Returns one {@link ActionRecord} per node moved; the root's record is appended after
 * its children's.
 *
 * @see org.sakaiproject.nakamura.api.lite.content.ContentManager#move(java.lang.String,
 *      java.lang.String, boolean, boolean)
 */
public List move(String from, String to, boolean force,
        boolean keepDestinationHistory) throws AccessDeniedException, StorageClientException {
    List record = Lists.newArrayList();

    // delete the nodes at `to` that aren't part of `from` if we're keeping destination
    // history
    if (keepDestinationHistory) {
        // put the last element of the paths into a set for matching
        PreemptiveIterator fromChildrenPathsIter = (PreemptiveIterator) listChildPaths(from);
        Set fromChildrenPaths = Sets.newHashSet();
        while(fromChildrenPathsIter.hasNext()) {
            fromChildrenPaths.add(lastElement(fromChildrenPathsIter.next()));
        }

        // check for last elements in the `to` that aren't in the `from` since we're keeping
        // destination history
        Iterator toChildrenPaths = listChildPaths(to);
        while (toChildrenPaths.hasNext()) {
            String toChildPath = toChildrenPaths.next();
            if (!fromChildrenPaths.contains(lastElement(toChildPath))) {
                delete(toChildPath, true);
            }
        }
    }
    // move this node first, then recurse into its children
    moveContent(from, to, force, keepDestinationHistory);

    PreemptiveIterator iter = (PreemptiveIterator) listChildPaths(from);
    while (iter.hasNext()) {
        String childPath = iter.next();

        // Since this is a direct child of the previous from, only the last token needs to
        // be appended to "to"
        record.addAll(move(childPath,
                to.concat(childPath.substring(childPath.lastIndexOf("/"))), force,
                keepDestinationHistory));
    }

    record.add(new ActionRecord(from, to));
    return record;
}

/**
 * Moves a single node (no recursion) from {@code from} to {@code to}: validates source
 * and destination, reconciles version history when keeping it, re-points the structure
 * row, creates the destination parent if missing, moves the ACLs, and marks the old
 * structure row deleted. Requires CAN_ANYTHING on the source and read+write on the
 * destination.
 */
private void moveContent(String from, String to, boolean force, boolean keepDestinationHistory)
        throws AccessDeniedException, StorageClientException {
    // to move, get the structure object out and modify, recreating parent
    // objects as necessary.
    checkOpen();
    accessControlManager.check(Security.ZONE_CONTENT, from, Permissions.CAN_ANYTHING);
    accessControlManager.check(Security.ZONE_CONTENT, to,
            Permissions.CAN_READ.combine(Permissions.CAN_WRITE));
    Map fromStructure = Maps.newHashMap(getCached(keySpace, contentColumnFamily, from));
    String fromContentId = null;
    Map fromContent = null;
    if (exists(fromStructure)) {
        fromContentId = (String)fromStructure.get(STRUCTURE_UUID_FIELD);
        fromContent = getCached(keySpace, contentColumnFamily, fromContentId);
        if (!exists(fromContent)) {
            throw new StorageClientException("The source content to move from " + from
                    + " does not exist, move operation failed");
        }
    } else {
        throw new StorageClientException("The source to move from " + from
                + " does not exist, move operation failed");
    }
    Map toStructure = getCached(keySpace, contentColumnFamily, to);
    if (exists(toStructure)) {
        String toContentId = (String)toStructure.get(STRUCTURE_UUID_FIELD);
        Map toContent = getCached(keySpace, contentColumnFamily, toContentId);
        if (exists(toContent)) {
            if (force) {
                if (!keepDestinationHistory) {
                    delete(to);
                } else {
                    // be sure to clean up our revision history so it is not orphaned:
                    // mark the source's old history row deleted before adopting the
                    // destination's history below
                    String fromVersionHistoryId = (String) fromContent.get(VERSION_HISTORY_ID_FIELD);
                    if (fromVersionHistoryId != null) {
                        // NOTE(review): every other putCached call in this class passes
                        // keySpace as the first argument; passing toContentId here looks
                        // like a transposed-argument bug — confirm against putCached's
                        // signature.
                        putCached(toContentId, contentColumnFamily, fromVersionHistoryId,
                                ImmutableMap.of(DELETED_FIELD, (Object) TRUE), false);
                    }

                    // set our content to have the history of the destination
                    boolean updateFrom = false;
                    String versionHistoryId = (String) toContent.get(VERSION_HISTORY_ID_FIELD);
                    if (versionHistoryId != null) {
                        updateFrom = true;
                        fromContent.put(VERSION_HISTORY_ID_FIELD, versionHistoryId);
                    }

                    // remove `to` properties that aren't in the `from` content; this allows us to
                    // replace the `to` with the `from` rather than accumulate the properties
                    Set diffKeys = diffKeys(toContent, fromContent);
                    if (diffKeys.size() > 0) {
                        updateFrom = true;
                        for (String diffKey : diffKeys) {
                            fromContent.put(diffKey, new RemoveProperty());
                        }
                    }
                    if (updateFrom) {
                        putCached(keySpace, contentColumnFamily, fromContentId, fromContent, false);
                    }
                }
            } else {
                // destination exists and force was not requested
                throw new StorageClientException("The destination content to move to " + to
                        + " exists, move operation failed");
            }
        }
    }

    String idStore = (String) fromStructure.get(STRUCTURE_UUID_FIELD);

    // move the content to the new location, then delete the old.
    if (!StorageClientUtils.isRoot(to)) {
        // if not a root, modify the new parent location, creating the
        // structure if necessary
        String parent = StorageClientUtils.getParentObjectPath(to);
        Map parentToStructure = getCached(keySpace, contentColumnFamily,
                parent);
        if (!exists(parentToStructure)) {
            // create a new parent
            Content content = new Content(parent, null);
            update(content);
        }

    }
    // update the content data to reflect the new primary location.
    putCached(keySpace, contentColumnFamily, idStore,
            ImmutableMap.of(PATH_FIELD, (Object)to, DELETED_FIELD, new RemoveProperty()), false);

    // insert the new to Structure and remove the from
    fromStructure.put(PATH_FIELD, to);
    fromStructure.put(DELETED_FIELD, new RemoveProperty());
    putCached(keySpace, contentColumnFamily, to, fromStructure, true);

    // move the ACLs
    moveAcl(from, to, force);

    // remove the old from (soft delete: rows are marked deleted, not removed).
    putCached(keySpace, contentColumnFamily, from, ImmutableMap.of(DELETED_FIELD, (Object)TRUE), false);
    // move does not add resourceTypes to events.
    eventListener.onDelete(Security.ZONE_CONTENT, from, accessControlManager.getCurrentUserId(), null, null, "op:move");
    eventListener.onUpdate(Security.ZONE_CONTENT, to, accessControlManager.getCurrentUserId(), null, true, null, "op:move");

}

// TODO: Unit test
/**
 * Creates a soft link at {@code from} pointing at the content of {@code to}. Only a new
 * structure row is written (with LINKED_PATH_FIELD recording the target); the target's
 * structure is not modified. Requires read on the target and read+write on the link
 * location; fails if the link location already exists or is a root.
 */
public void link(String from, String to) throws AccessDeniedException, StorageClientException {
    // a link places a pointer to the content in the parent of from, but
    // does not delete or modify the structure of to.
    // read from is required and write to.
    checkOpen();
    accessControlManager.check(Security.ZONE_CONTENT, to, Permissions.CAN_READ);
    accessControlManager.check(Security.ZONE_CONTENT, from,
            Permissions.CAN_READ.combine(Permissions.CAN_WRITE));
    Map toStructure = getCached(keySpace, contentColumnFamily, to);
    if (!exists(toStructure)) {
        throw new StorageClientException("The source content to link from " + to
                + " does not exist, link operation failed");
    }
    Map fromStructure = getCached(keySpace, contentColumnFamily, from);
    if (exists(fromStructure)) {
        throw new StorageClientException("The destination content to link to " + from
                + " exists, link operation failed");
    }

    if (StorageClientUtils.isRoot(from)) {
        throw new StorageClientException("The link " + to
                + " is a root, not possible to create a soft link");
    }

    // create a new structure object pointing back to the shared location

    Object idStore = toStructure.get(STRUCTURE_UUID_FIELD);
    // if not a root, modify the new parent location, creating the
    // structure if necessary
    String parent = StorageClientUtils.getParentObjectPath(from);
    Map parentToStructure = getCached(keySpace, contentColumnFamily, parent);
    if (!exists(parentToStructure)) {
        // create a new parent
        Content content = new Content(parent, null);
        update(content);
    }

    // create the new object for the path, pointing to the Object
    putCached(keySpace, contentColumnFamily, from, ImmutableMap.of(STRUCTURE_UUID_FIELD,
            idStore, PATH_FIELD, from, LINKED_PATH_FIELD, to, DELETED_FIELD, new RemoveProperty()), true);

}

/**
 * Saves a version of {@code path} with no extra version metadata.
 *
 * @see #saveVersion(String, Map)
 */
public String saveVersion(String path) throws StorageClientException, AccessDeniedException {
    return saveVersion(path, null);
}

/**
 * Freezes the current state of {@code path} as a read-only version and re-points the
 * structure row at a fresh copy that becomes the new live content. Optional
 * {@code versionMetadata} entries are stored on the frozen version under "metadata:"
 * keys. Returns the id of the frozen (saved) version.
 */
public String saveVersion(String path, Map versionMetadata) throws StorageClientException, AccessDeniedException {
    checkOpen();
    accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_WRITE);
    Map structure = getCached(keySpace, contentColumnFamily, path);
    if (!exists(structure)) {
        throw new StorageClientException("Item "+path+" does not exist");
    }
    String contentId = (String)structure.get(STRUCTURE_UUID_FIELD);
    Map saveVersion = getCached(keySpace, contentColumnFamily, contentId);
    if (!exists(saveVersion)) {
        throw new StorageClientException("Item "+path+" does not exist");
    }

    // versionHistoryId is the UUID of the version history for this node.

    String saveVersionId = (String)saveVersion.get(UUID_FIELD);

    String versionHistoryId = (String)saveVersion.get(VERSION_HISTORY_ID_FIELD);

    if (versionHistoryId == null) {
        // first version ever saved for this node: allocate a history row id
        versionHistoryId = StorageClientUtils.getInternalUuid();
        LOGGER.debug("Created new Version History UUID as {} for Object {} ",versionHistoryId, saveVersionId);
        saveVersion.put(VERSION_HISTORY_ID_FIELD, versionHistoryId);
    } else {
        // NOTE(review): this branch REUSES the existing history id, but the debug
        // message says "Created new" — misleading log text, confirm and reword.
        LOGGER.debug("Created new Version History UUID as {} for Object {} ",versionHistoryId, saveVersionId);

    }

    // the new live content starts as a copy of the current state
    Map newVersion = Maps.newHashMap(saveVersion);
    String newVersionId = StorageClientUtils.getInternalUuid();


    String saveBlockId = (String)saveVersion.get(BLOCKID_FIELD);

    // link the copies into the version chain
    newVersion.put(UUID_FIELD, newVersionId);
    newVersion.put(PREVIOUS_VERSION_UUID_FIELD, saveVersionId);
    if (saveBlockId != null) {
        newVersion.put(PREVIOUS_BLOCKID_FIELD, saveBlockId);
    }

    // freeze the old state: read-only, pointing forward at the new live version
    saveVersion.put(NEXT_VERSION_FIELD, newVersionId);
    saveVersion.put(READONLY_FIELD, TRUE);
    Object versionNumber = System.currentTimeMillis();

    saveVersion.put(VERSION_NUMBER_FIELD, versionNumber);

    if (versionMetadata != null) {
        for (Entry entry : versionMetadata.entrySet()) {
            saveVersion.put("metadata:" + entry.getKey(), entry.getValue());
        }
    }

    // persist: frozen version, new live version, history index entry, and the
    // structure row re-pointed at the new live version — in that order
    putCached(keySpace, contentColumnFamily, saveVersionId, saveVersion, false);
    putCached(keySpace, contentColumnFamily, newVersionId, newVersion, true);
    putCached(keySpace, contentColumnFamily, versionHistoryId,
            ImmutableMap.of(saveVersionId, versionNumber), true);
    putCached(keySpace, contentColumnFamily, path,
            ImmutableMap.of(STRUCTURE_UUID_FIELD, (Object)newVersionId), true);
    if ( LOGGER.isDebugEnabled() ) {
        LOGGER.debug("Saved Version History {} {} ", versionHistoryId,
                getCached(keySpace, contentColumnFamily, versionHistoryId));
        LOGGER.debug("Saved Version [{}] {}", saveVersionId, saveVersion);
        LOGGER.debug("New Version [{}] {}", newVersionId, newVersion);
        LOGGER.debug("Structure {} ", getCached(keySpace, contentColumnFamily, path));
    }
    return saveVersionId;
}

/**
 * Returns the version ids of {@code path} sorted newest-first (descending version
 * number, which is a save timestamp), or an empty list if the node or its history does
 * not exist.
 */
public List getVersionHistory(String path) throws AccessDeniedException,
        StorageClientException {
    checkOpen();
    accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ);
    Map structure = getCached(keySpace, contentColumnFamily, path);
    if (exists(structure)) {
        String contentId = (String)structure.get(STRUCTURE_UUID_FIELD);
        Map content = getCached(keySpace, contentColumnFamily, contentId);
        if (exists(content)) {
            String versionHistoryId = (String)content
                    .get(VERSION_HISTORY_ID_FIELD);
            if (versionHistoryId != null) {
                final Map versionHistory = getCached(keySpace,
                        contentColumnFamily, versionHistoryId);
                LOGGER.debug("Loaded Version History {} {} ", versionHistoryId, versionHistory);
                // the history row's own id is not a version entry
                versionHistory.remove(UUID_FIELD);
                // descending numeric order; the sign is taken from the long difference
                // so no int-narrowing overflow occurs
                return Ordering.from(new Comparator() {
                    public int compare(String o1, String o2) {
                        long l1 = (Long) versionHistory.get(o1);
                        long l2 = (Long) versionHistory.get(o2);
                        long r = l2 - l1;
                        if (r == 0) {
                            return 0;
                        } else if (r < 0) {
                            return -1;
                        }
                        return 1;
                    }
                }).sortedCopy(versionHistory.keySet());
            }
        }
    }
    return Collections.emptyList();
}

// TODO: Unit test
/**
 * Returns the default stream of the given saved version of {@code path}, or null if the
 * node, its history, or the version is not found.
 *
 * @see #getVersionInputStream(String, String, String)
 */
public InputStream getVersionInputStream(String path, String versionId)
        throws AccessDeniedException, StorageClientException, IOException {
    return getVersionInputStream(path, versionId, null);
}

// TODO: Unit test
/**
 * Returns the named stream ({@code streamId}, null for the default) of the given saved
 * version of {@code path}, verifying that the version belongs to the node's history;
 * null if anything along that chain is missing.
 */
public InputStream getVersionInputStream(String path, String versionId, String streamId)
        throws AccessDeniedException, StorageClientException, IOException {
    // NOTE(review): the ACL check happens before checkOpen() here, the reverse of every
    // sibling method — probably harmless, but confirm the intended ordering.
    accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ);
    checkOpen();
    Map structure = getCached(keySpace, contentColumnFamily, path);
    if (exists(structure)) {
        String contentId = (String)structure.get(STRUCTURE_UUID_FIELD);
        Map content = getCached(keySpace, contentColumnFamily, contentId);
        if (exists(content)) {
            String versionHistoryId = (String)content
                    .get(VERSION_HISTORY_ID_FIELD);
            if (versionHistoryId != null) {
                Map versionHistory = getCached(keySpace, contentColumnFamily,
                        versionHistoryId);
                if (versionHistory != null && versionHistory.containsKey(versionId)) {
                    return internalGetInputStream(versionId, streamId);
                }
            }
        }
    }
    return null;
}

/**
 * Returns the given saved version of {@code path} as a read-only Content object, or null
 * if the node, its history, the version entry, or the version content is missing (each
 * miss is logged at debug level).
 */
public Content getVersion(String path, String versionId) throws StorageClientException,
        AccessDeniedException {
    checkOpen();
    accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ);
    Map structure = getCached(keySpace, contentColumnFamily, path);
    if (exists(structure)) {
        String contentId = (String)structure.get(STRUCTURE_UUID_FIELD);
        Map content = getCached(keySpace, contentColumnFamily, contentId);
        if (exists(content)) {
            String versionHistoryId = (String)content
                    .get(VERSION_HISTORY_ID_FIELD);
            if (versionHistoryId != null) {
                Map versionHistory = getCached(keySpace, contentColumnFamily,
                        versionHistoryId);
                if (versionHistory != null && versionHistory.containsKey(versionId)) {
                    Map versionContent = getCached(keySpace,
                            contentColumnFamily, versionId);
                    if (exists(versionContent)) {
                        // internalize as read-only: saved versions must not be edited
                        Content contentObject = new Content(path, versionContent);
                        ((InternalContent) contentObject).internalize(this, true);
                        return contentObject;
                    } else {
                        LOGGER.debug("No Content for path {} version History Null{} ", path,
                                versionHistoryId);

                    }
                } else {
                    LOGGER.debug("History null for path {} version History {} {} ",
                            new Object[] { path, versionHistoryId, versionHistory });
                }
            } else {
                LOGGER.debug("History Id null for path {} ", path);
            }
        }
    }
    return null;
}

/** {@inheritDoc} — exposes this class's logger to the base class. */
@Override
protected Logger getLogger() {
    return LOGGER;
}

/**
 * Lazily finds content matching {@code searchProperties}. Each iteration runs the query
 * through the storage client and loads each hit via get(), silently skipping (at debug
 * level) hits the current user cannot read; the underlying client iterator is closed
 * when exhausted.
 */
public Iterable find(Map searchProperties) throws StorageClientException,
        AccessDeniedException {
    checkOpen();
    final Map finalSearchProperties = searchProperties;
    return new Iterable() {

        public Iterator iterator() {
            Iterator contentResultsIterator = null;
            try {
                final DisposableIterator> clientSearchKeysIterator = client.find(keySpace, contentColumnFamily, finalSearchProperties, ContentManagerImpl.this);
                contentResultsIterator = new PreemptiveIterator() {
                    Content contentResult;

                    protected boolean internalHasNext() {
                        contentResult = null;
                        // advance past hits that are deleted, unreadable, or broken
                        while (contentResult == null && clientSearchKeysIterator.hasNext()) {
                            try {
                                Map structureMap = clientSearchKeysIterator.next();
                                LOGGER.debug("Loaded Next as {} ", structureMap);
                                if (exists(structureMap)) {
                                    String path = (String) structureMap.get(PATH_FIELD);
                                    contentResult = get(path);
                                }
                            } catch (AccessDeniedException e) {
                                LOGGER.debug(e.getMessage(),e);
                            } catch (StorageClientException e) {
                                LOGGER.debug(e.getMessage(),e);
                            }
                        }
                        if (contentResult == null) {
                            // exhausted: release the underlying client iterator
                            close();
                            return false;
                        }
                        return true;
                    }

                    protected Content internalNext() {
                        return contentResult;
                    }

                    @Override
                    public void close() {
                        clientSearchKeysIterator.close();
                        super.close();
                    };
                };
            } catch (StorageClientException e) {
                // NOTE(review): on failure this returns null from iterator(), which will
                // NPE in any for-each loop over the Iterable — confirm callers expect that.
                LOGGER.error("Unable to iterate over sparsemap search results.", e);
            }
            return contentResultsIterator;
        }
    };
}

/**
 * Returns an estimated count of items matching {@code countSearch}, using the storage
 * client's "countestimate" statement with raw results. Presumably the estimate comes
 * back in result column "1" — TODO confirm against the storage client's statement set.
 */
public int count(Map countSearch) throws StorageClientException {
    Builder b = ImmutableMap.builder();
    b.putAll(countSearch);
    b.put(StorageConstants.CUSTOM_STATEMENT_SET, "countestimate");
    b.put(StorageConstants.RAWRESULTS, true);
    DisposableIterator> counts = client.find(keySpace, contentColumnFamily, b.build(), ContentManagerImpl.this);
    try {
        Map count = counts.next();
        return Integer.parseInt(String.valueOf(count.get("1")));
    } finally {
        if ( counts != null ) {
            counts.close();
        }
    }
}


/**
 * Returns true if the content at {@code path} has a body for the given stream id.
 * NOTE(review): get(path) may return null (missing path or access denied), which would
 * NPE on content.getProperties() — confirm callers guarantee existence.
 */
public boolean hasBody(String path, String streamId) throws StorageClientException, AccessDeniedException {
    Content content = get(path);
    return client.hasBody(content.getProperties(), streamId);
}

/** Installs a per-request principal token resolver on the access control manager. */
public void setPrincipalTokenResolver(PrincipalTokenResolver principalTokenResolver) {
    accessControlManager.setRequestPrincipalResolver(principalTokenResolver);
}

/** Clears the per-request principal token resolver. */
public void cleanPrincipalTokenResolver() {
    accessControlManager.clearRequestPrincipalResolver();
}


/**
 * Move ACLs from source to destination. This mirrors the move functionality found in
 * {@link ContentManager}.
 *
 * @param from
 *          The source path where the ACLs are applied.
 * @param to
 *          The destination path where the ACLs are to be applied.
 * @param force
 *          Whether to forcefully move to the destination (i.e. overwrite)
 * @return true if ACLs were moved
 * @throws AccessDeniedException
 * @throws StorageClientException
 * @see Security#ZONE_ADMIN, Security#ZONE_AUTHORIZABLES, Security#ZONE_CONTENT
 */
private boolean moveAcl(String from, String to, boolean force)
        throws AccessDeniedException, StorageClientException {
    String objectType = Security.ZONE_CONTENT;
    boolean moved = false;

    // check that we have the same permissions as used in ContentManager.move(..)
    accessControlManager.check(Security.ZONE_CONTENT, from, Permissions.CAN_ANYTHING);
    accessControlManager.check(Security.ZONE_CONTENT, to, Permissions.CAN_READ.combine(Permissions.CAN_WRITE));

    // get the ACL to move and make the map mutable
    // NOTE(review): Maps.newHashMap never returns null, so the guard below is always
    // true — the null check is dead and `moved` can only be returned as true.
    Map fromAcl = Maps.newHashMap(accessControlManager.getAcl(objectType, from));
    if (fromAcl != null) {

        // remove the read-only properties to be re-added when setting the new acl
        for (String readOnly : ACL_READ_ONLY_PROPERTIES) {
            fromAcl.remove(readOnly);
        }

        // check for a destination if necessary
        if (!force && !accessControlManager.getAcl(objectType, to).isEmpty()) {
            throw new StorageClientException("The destination ACL {" + to
                    + "} exists, move operation failed");
        }

        // parse the ACL and create modifications for the `to` location
        List modifications = Lists.newArrayList();
        for (Entry fromAce : fromAcl.entrySet()) {
            String aceKey = fromAce.getKey();
            Object aceValue = fromAce.getValue();
            if (aceValue != null) {
                try {
                    // NOTE(review): a non-Integer value throws ClassCastException here,
                    // not NumberFormatException, so the catch below never fires and a
                    // corrupt ACE would propagate — confirm and catch CCE instead.
                    int bitmap = (Integer) aceValue;
                    modifications.add(new AclModification(aceKey, bitmap, Operation.OP_REPLACE));
                } catch (NumberFormatException e) {
                    LOGGER.info("Skipping corrupt ACE value {} at {}->{}", new Object[] {
                            aceValue, from, to });
                }
            }
        }

        // set the ACL on the `to` path
        AclModification[] mods = modifications.toArray(new AclModification[modifications.size()]);
        accessControlManager.setAcl(objectType, to, mods);

        // remove the old ACLs on the `from` path (reuse the array, flipping each
        // modification to a delete)
        for (int i = 0; i < mods.length; i++) {
            mods[i] = new AclModification(mods[i].getAceKey(), 0, Operation.OP_DEL);
        }
        accessControlManager.setAcl(objectType, from, mods);

        moved = true;
    }
    return moved;
}

/**
 * Returns the final path segment of {@code dest} with any extension (text after the
 * first '.') stripped; returns "" when the path ends with '/'. Used to match source and
 * destination children by name during a history-preserving move.
 */
private String lastElement(String dest) {
    int i = dest.lastIndexOf('/');
    if ( i == dest.length()-1 ) {
        return "";
    }
    if (i > -1) {
        dest = dest.substring(i+1);
    }
    i = dest.indexOf('.');
    if (i > -1) {
        dest = dest.substring(0, i);
    }
    return dest;
}
}
diff
--git a/src/main/java/org/sakaiproject/nakamura/lite/content/InternalContent.java b/core/src/main/java/org/sakaiproject/nakamura/lite/content/InternalContent.java similarity index 52% rename from src/main/java/org/sakaiproject/nakamura/lite/content/InternalContent.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/content/InternalContent.java index ed0076c0..f6a99b60 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/content/InternalContent.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/content/InternalContent.java @@ -17,22 +17,26 @@ */ package org.sakaiproject.nakamura.lite.content; -import com.google.common.base.Predicate; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Iterators; -import com.google.common.collect.Maps; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import org.apache.commons.lang.StringUtils; import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.Repository; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; import org.sakaiproject.nakamura.api.lite.content.Content; -import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Iterator; -import java.util.Map; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterators; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; /** * Internal Content Object for holding sparse Content objects. 
Has a protected @@ -41,154 +45,178 @@ public class InternalContent { private static final Logger LOGGER = LoggerFactory.getLogger(Content.class); + + public static final String INTERNAL_FIELD_PREFIX = Repository.SYSTEM_PROP_PREFIX + ":"; /** * The ID of a content item */ - public static final String UUID_FIELD = "id"; + public static String UUID_FIELD = Repository.SYSTEM_PROP_PREFIX+ "id"; + + static boolean idFieldIsSet = false; /** * The path of the content item (used in structure row) */ - public static final String PATH_FIELD = "path"; + public static final String PATH_FIELD = Repository.SYSTEM_PROP_PREFIX + "path"; + /** + * The parent path ( + */ + public static final String PARENT_HASH_FIELD = INTERNAL_FIELD_PREFIX + "parenthash"; + /** * content item ID referenced by a Structure item */ - public static final String STRUCTURE_UUID_FIELD = ":cid"; + public static final String STRUCTURE_UUID_FIELD = INTERNAL_FIELD_PREFIX + "cid"; /** - * Where a structure object is a link, this field contains the location of the target of the link + * Where a structure object is a link, this field contains the location of + * the target of the link */ - public static final String LINKED_PATH_FIELD = ":link"; + public static final String LINKED_PATH_FIELD = INTERNAL_FIELD_PREFIX + "link"; /** * BlockID where the body of this content item is stored, if there is a body * (content row) */ - public static final String BLOCKID_FIELD = "blockId"; + public static final String BLOCKID_FIELD = Repository.SYSTEM_PROP_PREFIX + "blockId"; /** * ID of the previous version (content row) */ - public static final String PREVIOUS_VERSION_UUID_FIELD = "previousVersion"; + public static final String PREVIOUS_VERSION_UUID_FIELD = Repository.SYSTEM_PROP_PREFIX + + "previousVersion"; /** * Previous Block ID. 
(content row) */ - public static final String PREVIOUS_BLOCKID_FIELD = "previousBlockId"; + public static final String PREVIOUS_BLOCKID_FIELD = Repository.SYSTEM_PROP_PREFIX + + "previousBlockId"; /** * The ID of the next version (content row) */ - public static final String NEXT_VERSION_FIELD = "nextVersion"; + public static final String NEXT_VERSION_FIELD = Repository.SYSTEM_PROP_PREFIX + "nextVersion"; /** * Set to "Y" if the content item is read only. (content row) */ - public static final String READONLY_FIELD = "readOnly"; + public static final String READONLY_FIELD = Repository.SYSTEM_PROP_PREFIX + "readOnly"; /** * set to "Y" if deleted. (content row) */ - public static final String DELETED_FIELD = "deleted"; + public static final String DELETED_FIELD = StorageClient.DELETED_FIELD; /** * The block size in bytes in each block in a block set, if body store uses * blocking (content row) */ - public static final String BLOCKSIZE_FIELD = "blocksize"; + public static final String BLOCKSIZE_FIELD = Repository.SYSTEM_PROP_PREFIX + "blocksize"; /** * Total length of the content body (content row) */ - public static final String LENGTH_FIELD = "length"; + public static final String LENGTH_FIELD = Repository.SYSTEM_PROP_PREFIX + "length"; /** * The number of block sets in a body (content row) */ - public static final String NBLOCKS_FIELD = "nblocks"; + public static final String NBLOCKS_FIELD = Repository.SYSTEM_PROP_PREFIX + "nblocks"; /** * Yes, True, etc */ - public static final String TRUE = "Y"; + public static final String TRUE = StorageClient.TRUE; /** * The date (stored as GMT epoch long) the body was last modified. (content * row) */ - public static final String BODY_LAST_MODIFIED = "bodyLastModified"; + public static final String BODY_LAST_MODIFIED_FIELD = Repository.SYSTEM_PROP_PREFIX + + "bodyLastModified"; /** * The user ID that last modified the body. 
(content row) */ - public static final String BODY_LAST_MODIFIED_BY = "bodyLastModifiedBy"; + public static final String BODY_LAST_MODIFIED_BY_FIELD = Repository.SYSTEM_PROP_PREFIX + + "bodyLastModifiedBy"; /** * The date the body was created (GMT epoch long) (content row) */ - public static final String BODY_CREATED = "bodyCreated"; + public static final String BODY_CREATED_FIELD = Repository.SYSTEM_PROP_PREFIX + "bodyCreated"; /** * The user that created the body. (content row) */ - public static final String BODY_CREATED_BY = "bodyCreatedBy"; + public static final String BODY_CREATED_BY_FIELD = Repository.SYSTEM_PROP_PREFIX + + "bodyCreatedBy"; /** * The time the item was created. (content row) */ - public static final String CREATED = "created"; + public static final String CREATED_FIELD = Repository.SYSTEM_PROP_PREFIX + "created"; /** * The user that created the item. (content row) */ - public static final String CREATED_BY = "createdBy"; + public static final String CREATED_BY_FIELD = Repository.SYSTEM_PROP_PREFIX + "createdBy"; /** * The time the item was last modified. (content row) */ - public static final String LASTMODIFIED = "lastModified"; + public static final String LASTMODIFIED_FIELD = Repository.SYSTEM_PROP_PREFIX + "lastModified"; /** * The user that lastModified the item. (content row) */ - public static final String LASTMODIFIED_BY = "lastModifiedBy"; + public static final String LASTMODIFIED_BY_FIELD = Repository.SYSTEM_PROP_PREFIX + + "lastModifiedBy"; /** * The path the content object was copied from if it was copied */ - public static final String COPIED_FROM_PATH = "copiedFrom"; + public static final String COPIED_FROM_PATH_FIELD = Repository.SYSTEM_PROP_PREFIX + + "copiedFrom"; /** * The ID the content object was copied from. 
*/ - public static final String COPIED_FROM_ID = "copiedFromId"; + public static final String COPIED_FROM_ID_FIELD = Repository.SYSTEM_PROP_PREFIX + + "copiedFromId"; /** * If the copy was deep, then true */ - public static final String COPIED_DEEP = "copiedDeep"; + public static final String COPIED_DEEP_FIELD = Repository.SYSTEM_PROP_PREFIX + "copiedDeep"; /** * Mime type */ - public static final String MIMETYPE = "mimeType"; + public static final String MIMETYPE_FIELD = Repository.SYSTEM_PROP_PREFIX + "mimeType"; /** - * Charset encoding if char based. + * The sling resource type field. */ - public static final String ENCODING = "encoding"; + public static final String SLING_RESOURCE_TYPE_FIELD = "sling:resourceType"; /** - * + * Alternative resource type field. */ - public static final String VERSION_HISTORY_ID_FIELD = "versionHistoryId"; + public static final String RESOURCE_TYPE_FIELD = "resourceType"; /** - * + * Charset encoding if char based. */ - public static final String VERSION_NUMBER = "versionNumber"; - + public static final String ENCODING_FIELD = Repository.SYSTEM_PROP_PREFIX + "encoding"; + /** - * The who this version was saved by + * */ - public static final String VERSION_SAVEDBY = "versionSavedBy"; - - + public static final String VERSION_HISTORY_ID_FIELD = Repository.SYSTEM_PROP_PREFIX + + "versionHistoryId"; + /** + * + */ + public static final String VERSION_NUMBER_FIELD = Repository.SYSTEM_PROP_PREFIX + + "versionNumber"; /** - * Map of the structure object for the content object. + * The who this version was saved by */ - private ImmutableMap structure; + public static final String VERSION_SAVEDBY_FIELD = Repository.SYSTEM_PROP_PREFIX + + "versionSavedBy"; + /** * Map of the content object itself. 
*/ @@ -216,7 +244,6 @@ public class InternalContent { private boolean newcontent; private boolean readOnly; - /** * Create a new Content Object that has not been persisted * @@ -239,31 +266,28 @@ public InternalContent(String path, Map content) { /** * Convert a new content object to an internal version. - * - * @param structure - * the structure object + * * @param contentManager * the content manager now managing this content object. + * + * @param readOnly sets this contentManager to be either read-only or not. */ - void internalize(Map structure, ContentManagerImpl contentManager, boolean readOnly) { - this.structure = ImmutableMap.copyOf(structure); + void internalize(ContentManagerImpl contentManager, boolean readOnly) { this.contentManager = contentManager; updated = false; newcontent = false; this.readOnly = readOnly; } - /** * Reset the object back to its last saved state. */ public void reset(Map updatedMap) { - if ( !readOnly ) { + if (!readOnly) { this.content = ImmutableMap.copyOf(updatedMap); updatedContent.clear(); updated = false; - newcontent = false; - LOGGER.debug("Reset to {} ",updatedMap); + LOGGER.debug("Reset to {} ", updatedMap); } } @@ -280,7 +304,7 @@ public boolean isNew() { * retrieved. 
*/ public boolean isUpdated() { - if ( readOnly ) { + if (readOnly) { return false; } return updated; @@ -293,12 +317,15 @@ public boolean isUpdated() { */ public Map getProperties() { LOGGER.debug("getting properties map {}", content); - return StorageClientUtils.getFilterMap(content, updatedContent, null, null); + return StorageClientUtils.getFilterMap(content, updatedContent, null, null, false); } public Map getPropertiesForUpdate() { - return StorageClientUtils.getFilterMap(content, updatedContent, null, - null); + return StorageClientUtils.getFilterMap(content, updatedContent, null, null, true); + } + + public Map getOriginalProperties() { + return StorageClientUtils.getFilterMap(content, null, null, null, false); } /** @@ -308,31 +335,33 @@ public Map getPropertiesForUpdate() { * the key for the property * @param value * the value for the property in storage format created with - * StorageContentUtils.toStore() + * StorageContentUtils.toStore(). Must not be null. */ public void setProperty(String key, Object value) { - if ( readOnly) { + if (readOnly) { return; } + if (value == null) { + throw new IllegalArgumentException("value must not be null"); + } Object o = content.get(key); - if (!value.equals(o) ) { + if (!value.equals(o)) { updatedContent.put(key, value); updated = true; - } else if ( updatedContent.containsKey(key) && !value.equals(updatedContent.get(key)) ) { + } else if (updatedContent.containsKey(key) && !value.equals(updatedContent.get(key))) { updatedContent.put(key, value); - updated = true; + updated = true; } } - + public void removeProperty(String name) { - if ( readOnly) { + if (readOnly) { return; } setProperty(name, new RemoveProperty()); } - /** * @param key * @return the value of the property in storage format (use @@ -341,29 +370,32 @@ public void removeProperty(String name) { * hasProperty(String key) should be checked for an authoratative * answer. 
*/ - // TODO: Unit test public Object getProperty(String key) { - if ( updatedContent.containsKey(key)) { + if (updatedContent.containsKey(key)) { Object o = updatedContent.get(key); - if ( o instanceof RemoveProperty ) { + if (o instanceof RemoveProperty) { return null; } return o; } - Object o = content.get(key); - if ( o instanceof RemoveProperty ) { + Object o = content.get(key); + if (o instanceof RemoveProperty) { return null; } return o; } + public String getId() { + return (String) content.get(UUID_FIELD); + } + /** * @param key * @return true if the property exists. */ public boolean hasProperty(String key) { - if ( updatedContent.containsKey(key)) { - return !( updatedContent.get(key) instanceof RemoveProperty); + if (updatedContent.containsKey(key)) { + return !(updatedContent.get(key) instanceof RemoveProperty); } return content.containsKey(key) && !(content.get(key) instanceof RemoveProperty); } @@ -379,38 +411,18 @@ public String getPath() { * @return an iterable for all children of this content item. 
*/ public Iterable listChildren() { + if (newcontent) { + return Collections.emptyList(); + } return new Iterable() { public Iterator iterator() { - final Iterator childIterator = listChildPaths().iterator(); - return new PreemptiveIterator() { - Content childContent; - - protected boolean internalHasNext() { - childContent = null; - try { - while (childContent == null && childIterator.hasNext()) { - String child = childIterator.next(); - try { - childContent = contentManager.get(StorageClientUtils.newPath( - path, child)); - } catch (AccessDeniedException e) { - LOGGER.debug("Unable to load {} cause {}", new Object[] { - child, e.getMessage() }, e); - } - - } - } catch (StorageClientException e) { - LOGGER.debug("Unable to load Children cause {}", e.getMessage(), e); - - } - return (childContent != null); - } - - protected Content internalNext() { - return childContent; - } - }; + try { + return contentManager.listChildren(path); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } + return Iterators.emptyIterator(); } }; } @@ -419,15 +431,74 @@ protected Content internalNext() { * @return an iterable of all relative child paths of this object. 
*/ public Iterable listChildPaths() { + if (newcontent) { + return Collections.emptyList(); + } + return new Iterable() { + + public Iterator iterator() { + try { + return contentManager.listChildPaths(path); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } + return Iterators.emptyIterator(); + } + }; + } + + public Iterable listStreams() { + final Set streams = Sets.newHashSet(); + for (Entry e : content.entrySet()) { + String k = e.getKey(); + String[] streamIds = StringUtils.split(k, "/", 2); + if (streamIds.length == 2) { + streams.add(streamIds[1]); + } + } return new Iterable() { public Iterator iterator() { - return Iterators.filter(structure.keySet().iterator(), new Predicate() { - public boolean apply(String input) { - return input.charAt(0) != ':'; - } - }); + return streams.iterator(); } }; } + @Override + public String toString() { + return "Path: " + getPath() + "; Properties: " + getProperties(); + } + + /** + * @deprecated This method sets the ID field for the whole system. Do not + * use. Its been provided to make it possible to configure the + * ID field name used by Sparse to allow Berkley to continue + * running without migration. DO NOT USE, IT WILL HAVE NO + * EFFECT. + * @param idFieldName + */ + public static void setUuidField(String idFieldName) { + if (!idFieldIsSet) { + idFieldIsSet = true; + LOGGER.warn("ID Field is being set to {}, this can only be done once per JVM start ", + idFieldName); + UUID_FIELD = idFieldName; + } else { + LOGGER.warn("ID Field has already been set to {} and cannot be reset. ", idFieldName); + } + } + + + /** + * + * @return true if the content item is deleted, the system does not delete + * content items, it marks items as deleted. This will allow the + * storage layers to maintain an update pattern that is close to + * append only allowing compression of the storage to be + * achieved by background task which may then also remove holes in the storage. 
+ * This is not dissimilar from the way most file systems work. + */ + public boolean isDeleted() { + return TRUE.equals(content.get(DELETED_FIELD)); + } + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/content/PathPrincipalTokenResolver.java b/core/src/main/java/org/sakaiproject/nakamura/lite/content/PathPrincipalTokenResolver.java new file mode 100644 index 00000000..54f0f8d1 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/content/PathPrincipalTokenResolver.java @@ -0,0 +1,72 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
package org.sakaiproject.nakamura.lite.content;

import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalTokenResolver;
import org.sakaiproject.nakamura.api.lite.content.Content;
import org.sakaiproject.nakamura.api.lite.content.ContentManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Lists;

import java.util.List;

/**
 * Resolves principal tokens by loading them as content items stored beneath a
 * fixed base path ({tokenPath}/{principal}), optionally chaining to a further
 * resolver whose tokens are appended to the result.
 */
public class PathPrincipalTokenResolver implements PrincipalTokenResolver,
        ChainingPrincipalTokenResolver {

    private static final Logger LOGGER = LoggerFactory.getLogger(PathPrincipalTokenResolver.class);
    private final ContentManager contentManager;
    private final String tokenPath;
    private PrincipalTokenResolver nextTokenResolver;

    /**
     * @param tokenPath
     *            base path under which token content items are stored.
     * @param contentManager
     *            manager used to load token content items.
     */
    public PathPrincipalTokenResolver(String tokenPath, ContentManager contentManager) {
        this.contentManager = contentManager;
        this.tokenPath = tokenPath;
    }

    /**
     * Load the token for the given principal from {tokenPath}/{principal} and
     * append any tokens resolved by the chained resolver, if one is set.
     *
     * @param principal
     *            the principal whose tokens should be resolved.
     * @return the resolved tokens; never null, possibly empty.
     */
    public List<Content> resolveTokens(String principal) {
        List<Content> tokens = Lists.newArrayList();
        try {
            Content token = contentManager.get(StorageClientUtils.newPath(tokenPath, principal));
            if (token != null) {
                tokens.add(token);
            }
        } catch (AccessDeniedException e) {
            // Denied access is a normal outcome for some principals; log the
            // principal and reason but no stack trace.
            LOGGER.warn("Unable to get token for principal {}: {}", principal, e.getMessage());
        } catch (StorageClientException e) {
            LOGGER.warn("Unable to get token for principal " + principal, e);
        }
        if (nextTokenResolver != null) {
            tokens.addAll(nextTokenResolver.resolveTokens(principal));
        }
        return tokens;
    }

    // NOTE(review): method name "Resovler" is misspelled but is part of the
    // ChainingPrincipalTokenResolver contract, so it must be preserved.
    public void setNextTokenResovler(PrincipalTokenResolver nextTokenResolver) {
        this.nextTokenResolver = nextTokenResolver;
    }

    public void clearNextTokenResolver() {
        this.nextTokenResolver = null;
    }

}
package org.sakaiproject.nakamura.lite.http;

import java.io.IOException;
import java.util.Set;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;

import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Service;
import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Session;
import org.sakaiproject.nakamura.api.lite.SparseSessionTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Sets;

/**
 * Servlet filter that tracks sparse Sessions opened during a request and, when
 * the filter chain unwinds, commits them (last registered first) and then
 * always logs them out.
 */
@Component(immediate = true, metatype = true)
@Service(value = { Filter.class, SparseSessionTracker.class })
@Properties(value = { @Property(name = "pattern", value = "/.*") })
public class SparseSessionTrackerImpl implements Filter, SparseSessionTracker {

    // Request attribute holding the ordered set of sessions for this request.
    private static final String SESSION_ATTRIBUTE = SparseSessionTrackerImpl.class.getName()
            + ".session";
    private static final Logger LOGGER = LoggerFactory.getLogger(SparseSessionTrackerImpl.class);

    /**
     * Record a freshly opened session against the current request so it can be
     * committed and logged out when the request completes.
     *
     * @return the session that was passed in, for call-chaining convenience.
     */
    public Session register(Session login, HttpServletRequest request) {
        @SuppressWarnings("unchecked")
        Set<Session> tracked = (Set<Session>) request.getAttribute(SESSION_ATTRIBUTE);
        if (tracked == null) {
            // Insertion order matters: get() returns the last one registered.
            tracked = Sets.newLinkedHashSet();
            request.setAttribute(SESSION_ATTRIBUTE, tracked);
        }
        tracked.add(login);
        return login;
    }

    /**
     * @return the session most recently registered on this request, or null if
     *         none has been registered.
     */
    public Session get(HttpServletRequest request) {
        @SuppressWarnings("unchecked")
        Set<Session> tracked = (Set<Session>) request.getAttribute(SESSION_ATTRIBUTE);
        if (tracked == null) {
            return null;
        }
        // Walk the linked set; the final element is the last one added.
        Session mostRecent = null;
        for (Session candidate : tracked) {
            mostRecent = candidate;
        }
        return mostRecent;
    }

    public void destroy() {
    }

    public void init(FilterConfig config) throws ServletException {
    }

    /**
     * Run the chain, then commit every tracked session in reverse registration
     * order; logout of each session is guaranteed via the finally block even
     * if the chain or a commit throws.
     */
    @SuppressWarnings("unchecked")
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain)
            throws IOException, ServletException {
        Session[] toLogout = null;
        try {
            filterChain.doFilter(request, response);
            if (request instanceof HttpServletRequest) {
                HttpServletRequest hrequest = (HttpServletRequest) request;
                Set<Session> tracked = (Set<Session>) hrequest.getAttribute(SESSION_ATTRIBUTE);
                if (tracked != null) {
                    // Detach from the request before committing.
                    hrequest.setAttribute(SESSION_ATTRIBUTE, null);
                    toLogout = tracked.toArray(new Session[tracked.size()]);
                    // Commit starting from the last one registered.
                    for (int i = toLogout.length - 1; i >= 0; i--) {
                        LOGGER.debug("Committing {} ", toLogout[i]);
                        toLogout[i].commit();
                    }
                }
            }
        } finally {
            if (toLogout != null) {
                for (int i = toLogout.length - 1; i >= 0; i--) {
                    try {
                        LOGGER.debug("Logout {} ", toLogout[i]);
                        toLogout[i].logout();
                    } catch (ClientPoolException e) {
                        LOGGER.error(e.getMessage(), e);
                    }
                }
            }
        }
    }

}
USER_FIELD = "u"; + private static final String EXPIRES_FIELD = "x"; + private static final String EXPIRES_AT_FIELD = "a"; + private static final String EXTRA_FIELD = "e"; + private static final String TOKEN_FIELD = "t"; + private Map lockMap; + + /** + * @param path + * @param user + * @param expires + * when the lock will expire in seconds from the time its + * created. + * @param extra + */ + public Lock(String path, String user, long timeoutInSeconds, String extra) { + this(path, user, timeoutInSeconds, extra, StorageClientUtils.insecureHash(System + .currentTimeMillis() + ":" + path + ":" + user + ":" + timeoutInSeconds)); + } + + public Lock(Map lockMap) { + this.lockMap = ImmutableMap.copyOf(lockMap); + } + + public Lock(String path, String user, long timeoutInSeconds, String extra, String token) { + Builder b = ImmutableMap.builder(); + b.put(Lock.PATH_FIELD, path); + b.put(Lock.USER_FIELD, user); + b.put(Lock.EXPIRES_FIELD, timeoutInSeconds); + b.put(Lock.EXPIRES_AT_FIELD, System.currentTimeMillis() + (timeoutInSeconds * 1000L)); + b.put(Lock.EXTRA_FIELD, extra); + b.put(Lock.TOKEN_FIELD, token); + + lockMap = b.build(); + } + + public boolean hasExpired() { + return System.currentTimeMillis() > (Long) lockMap.get(Lock.EXPIRES_AT_FIELD); + } + + public boolean isOwner(String currentUser) { + return currentUser.equals(lockMap.get(Lock.USER_FIELD)); + } + + public Map getProperties() { + return lockMap; + } + + public boolean hasToken(String token) { + return token.equals(lockMap.get(TOKEN_FIELD)); + } + + public String getToken() { + return (String) lockMap.get(TOKEN_FIELD); + } + + public String getOwner() { + return (String) lockMap.get(USER_FIELD); + } + + @Override + public String toString() { + return String.valueOf(lockMap); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof Lock) { + return getToken().equals(((Lock) obj).getToken()); + } + return false; + } + + @Override + public int hashCode() { + return 
package org.sakaiproject.nakamura.lite.lock;

import java.util.Map;

import org.sakaiproject.nakamura.api.lite.CacheHolder;
import org.sakaiproject.nakamura.api.lite.Configuration;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.authorizable.User;
import org.sakaiproject.nakamura.api.lite.lock.AlreadyLockedException;
import org.sakaiproject.nakamura.api.lite.lock.LockManager;
import org.sakaiproject.nakamura.api.lite.lock.LockState;
import org.sakaiproject.nakamura.lite.CachingManagerImpl;
import org.sakaiproject.nakamura.lite.storage.spi.StorageClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Hierarchical lock manager. Locks are stored in their own column family keyed
 * by path; every operation walks from the requested path up to the root, so a
 * lock on any ancestor governs all of its descendants. Expired locks are
 * treated as absent (and cleared lazily when encountered by lock()).
 */
public class LockManagerImpl extends CachingManagerImpl implements LockManager {

    private static final Logger LOGGER = LoggerFactory.getLogger(LockManagerImpl.class);
    private final String lockColumnFamily;
    private final String keySpace;
    // The id of the user this manager acts for; lock ownership is checked
    // against this id.
    private final String currentUser;

    public LockManagerImpl(StorageClient storageClient, Configuration config, User currentUser,
            Map<String, CacheHolder> sharedCache) {
        super(storageClient, sharedCache);
        this.lockColumnFamily = config.getLockColumnFamily();
        this.keySpace = config.getKeySpace();
        this.currentUser = currentUser.getId();
    }

    public void close() {
    }

    /**
     * @return the lock stored exactly at this path, or null if none exists.
     *         Expiry is NOT checked here; callers decide what expiry means.
     */
    private Lock get(String path) throws StorageClientException {
        Map<String, Object> lockMap = getCached(keySpace, lockColumnFamily, path);
        if (lockMap == null || lockMap.size() == 0) {
            return null;
        }
        Lock found = new Lock(lockMap);
        LOGGER.debug("Got Lock {} {} ", path, found);
        return found;
    }

    // Remove the lock record stored at this exact path.
    private void clear(String path) throws StorageClientException {
        removeCached(keySpace, lockColumnFamily, path);
    }

    /**
     * Take a lock at path on behalf of the current user.
     *
     * @throws AlreadyLockedException
     *             if any ancestor (or the path itself) holds an unexpired lock
     *             owned by another user. Expired locks found on the way up are
     *             cleared as a side effect.
     * @return the token of the newly created lock.
     */
    public String lock(String path, long expires, String extra) throws StorageClientException,
            AlreadyLockedException {
        String node = path;
        boolean reachedRoot = false;
        while (!reachedRoot) {
            Lock existing = get(node);
            if (existing != null) {
                if (existing.hasExpired()) {
                    // Lazily reap expired locks as we walk up.
                    clear(node);
                } else if (!existing.isOwner(currentUser)) {
                    throw new AlreadyLockedException(node);
                }
            }
            reachedRoot = StorageClientUtils.isRoot(node);
            if (!reachedRoot) {
                node = StorageClientUtils.getParentObjectPath(node);
            }
        }
        Lock newLock = new Lock(path, currentUser, expires, extra);
        LOGGER.debug("Applying lock {} {} ", path, newLock);
        putCached(keySpace, lockColumnFamily, path, newLock.getProperties(), true);
        return newLock.getToken();
    }

    /**
     * Re-issue an unexpired lock held by the current user with the given
     * token, resetting its timeout. Searches from path up to the root.
     *
     * @return the token of the refreshed lock, or null if no matching lock was
     *         found anywhere on the path.
     */
    public String refreshLock(String path, long timeoutInSeconds, String extra, String token)
            throws StorageClientException {
        String node = path;
        for (;;) {
            Lock existing = get(node);
            if (existing != null && !existing.hasExpired()) {
                LOGGER.debug("Lock is not null and has not expired");
                if (existing.isOwner(currentUser) && existing.hasToken(token)) {
                    LOGGER.info("Has Owner locked with token {} {} {} {} {}", new Object[] { path,
                            currentUser, timeoutInSeconds, extra, token });
                    // Note: the refreshed lock is written at the original
                    // requested path, matching the lookup order used elsewhere.
                    Lock refreshed = new Lock(path, currentUser, timeoutInSeconds, extra, token);
                    putCached(keySpace, lockColumnFamily, path, refreshed.getProperties(), false);
                    return refreshed.getToken();
                }
            }
            if (StorageClientUtils.isRoot(node)) {
                return null;
            }
            node = StorageClientUtils.getParentObjectPath(node);
        }
    }

    /**
     * Clear every unexpired lock from path up to the root that is owned by the
     * current user and carries the given token.
     */
    public void unlock(String path, String token) throws StorageClientException {
        String node = path;
        for (;;) {
            Lock existing = get(node);
            if (existing != null && !existing.hasExpired() && existing.isOwner(currentUser)
                    && existing.hasToken(token)) {
                LOGGER.debug("Clearing lock at {} {} ", node, existing);
                clear(node);
            }
            if (StorageClientUtils.isRoot(node)) {
                return;
            }
            node = StorageClientUtils.getParentObjectPath(node);
        }
    }

    /**
     * @return the state of the nearest unexpired lock at or above path:
     *         owner-with-token, owner-without-token, locked-by-another-user,
     *         or not-locked if the walk reaches the root without a match.
     */
    public LockState getLockState(String path, String token) throws StorageClientException {
        String node = path;
        for (;;) {
            Lock existing = get(node);
            if (existing == null || existing.hasExpired()) {
                LOGGER.debug("Lock is null or has expired {} ", existing);
            } else {
                LOGGER.debug("Lock is not null and has not expired");
                if (!existing.isOwner(currentUser)) {
                    LOGGER.debug("Has User locked");
                    return LockState.getUserLocked(node, existing.getOwner(), existing.getToken(),
                            existing.getExtra());
                }
                if (existing.hasToken(token)) {
                    LOGGER.debug("Has Owner locked with token");
                    return LockState.getOwnerLockedToken(node, currentUser, token,
                            existing.getExtra());
                }
                LOGGER.debug("Has Owner locked with not token");
                return LockState.getOwnerLockedNoToken(node, currentUser, existing.getToken(),
                        existing.getExtra());
            }
            if (StorageClientUtils.isRoot(node)) {
                LOGGER.debug("Has Not locked");
                return LockState.getNotLocked();
            }
            node = StorageClientUtils.getParentObjectPath(node);
        }
    }

    /**
     * @return true if any unexpired lock exists at or above path, regardless
     *         of owner.
     */
    public boolean isLocked(String path) throws StorageClientException {
        String node = path;
        for (;;) {
            Lock existing = get(node);
            if (existing != null && !existing.hasExpired()) {
                LOGGER.debug("Is Locked {} {} {} ", new Object[] { path, node, existing });
                return true;
            }
            if (StorageClientUtils.isRoot(node)) {
                LOGGER.debug("Is Not Locked {} ", path);
                return false;
            }
            node = StorageClientUtils.getParentObjectPath(node);
        }
    }

    @Override
    protected Logger getLogger() {
        return LOGGER;
    }

}
b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/AbstractIndexer.java new file mode 100644 index 00000000..b0792ce8 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/AbstractIndexer.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
package org.sakaiproject.nakamura.lite.storage.jdbc;

import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for JDBC indexers. Decides, per column, whether a property takes
 * part in indexing and find operations. A column participates when its
 * "columnFamily:column" key is either one of the hard-wired auto-index columns
 * or one of the configured index columns.
 */
public abstract class AbstractIndexer implements Indexer {

    private static final Logger LOGGER = LoggerFactory.getLogger(AbstractIndexer.class);

    // Configured index columns, keyed as "columnFamily:column".
    private final Set<String> indexColumns;

    public AbstractIndexer(Set<String> indexColumns) {
        this.indexColumns = indexColumns;
    }

    /**
     * @param keySpace
     *            unused, retained for signature compatibility with callers.
     * @return true if find operations may use this column; logs at debug when
     *         the column is ignored.
     */
    boolean shouldFind(String keySpace, String columnFamily, String k) {
        if (isIndexedColumn(columnFamily, k)) {
            return true;
        }
        LOGGER.debug("Ignoring Find operation on {}:{}", columnFamily, k);
        return false;
    }

    /**
     * @param keySpace
     *            unused, retained for signature compatibility with callers.
     * @return true if this column should be written to the index.
     */
    boolean shouldIndex(String keySpace, String columnFamily, String k) {
        if (isIndexedColumn(columnFamily, k)) {
            LOGGER.debug("Will Index {}:{}", columnFamily, k);
            return true;
        }
        LOGGER.debug("Will Not Index {}:{}", columnFamily, k);
        return false;
    }

    // Single membership test shared by shouldFind/shouldIndex so the two
    // policies cannot drift apart: auto-index columns always qualify, then
    // any configured index column.
    private boolean isIndexedColumn(String columnFamily, String k) {
        String key = columnFamily + ":" + k;
        return JDBCStorageClient.AUTO_INDEX_COLUMNS.contains(key) || indexColumns.contains(key);
    }

}
+import java.io.InputStream; +import java.io.PrintWriter; +import java.security.NoSuchAlgorithmException; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Enumeration; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Properties; +import java.util.Timer; import org.apache.commons.lang.StringUtils; import org.apache.commons.pool.PoolableObjectFactory; @@ -29,36 +39,30 @@ import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; -import org.apache.felix.scr.annotations.ReferenceCardinality; -import org.apache.felix.scr.annotations.ReferencePolicy; -import org.apache.felix.scr.annotations.Service; -import org.sakaiproject.nakamura.api.lite.CacheHolder; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageCacheManager; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.lite.storage.AbstractClientConnectionPool; -import org.sakaiproject.nakamura.lite.storage.ConcurrentLRUMap; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.AbstractClientConnectionPool; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.InputStream; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; -import java.util.Timer; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; -@Component(immediate = true, metatype = true, 
inherit = true) -@Service(value = StorageClientPool.class) -public class JDBCStorageClientPool extends AbstractClientConnectionPool { +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +/** + * An base class for JDBC drivers. If you change the OSGi configuration of this + * class you will need to re-build all fragment bundles that contain code extending this. + * @author ieb + * + */ +@Component(componentAbstract = true) +public class BaseJDBCStorageClientPool extends AbstractClientConnectionPool { - private static final Logger LOGGER = LoggerFactory.getLogger(JDBCStorageClientPool.class); + private static final Logger LOGGER = LoggerFactory.getLogger(BaseJDBCStorageClientPool.class); @Property(value = { "jdbc:derby:sling/sparsemap/db;create=true" }) public static final String CONNECTION_URL = "jdbc-url"; @@ -66,12 +70,16 @@ public class JDBCStorageClientPool extends AbstractClientConnectionPool { public static final String JDBC_DRIVER = "jdbc-driver"; @Property(value = { "sa" }) - private static final String USERNAME = "username"; + public static final String USERNAME = "username"; @Property(value = { "" }) - private static final String PASSWORD = "password"; + public static final String PASSWORD = "password"; - @Reference(cardinality=ReferenceCardinality.OPTIONAL_UNARY, policy=ReferencePolicy.DYNAMIC) - private StorageCacheManager storageManagerCache; + /** + * Clients should provide an implementation of NamedCacheManager in preference to this cache manager. 
+ */ + @Reference + public StorageCacheManager storageManagerCache; + private static final String BASESQLPATH = "org/sakaiproject/nakamura/lite/storage/jdbc/config/client"; @@ -88,13 +96,12 @@ public void activateObject(Object obj) throws Exception { public void destroyObject(Object obj) throws Exception { JDBCStorageClient client = (JDBCStorageClient) obj; - client.close(); - + client.destroy(); } public Object makeObject() throws Exception { - return checkSchema(new JDBCStorageClient(JDBCStorageClientPool.this, properties, - getSqlConfig(getConnection()))); + return checkSchema(new JDBCStorageClient(BaseJDBCStorageClientPool.this, properties, + getSqlConfig(), getIndexColumns(), getIndexColumnsTypes(), getIndexColumnsNames(), true )); } public void passivateObject(Object obj) throws Exception { @@ -130,61 +137,90 @@ public boolean validateObject(Object obj) { private Timer timer; - private StorageCacheManager defaultStorageManagerCache; - - private Map sharedCache; - + private Map indexColumnsMap; + @Override @Activate @SuppressWarnings(value={"NP_CLOSING_NULL"},justification="Invalid report, if this was the case then nothing would work") public void activate(Map properties) throws ClassNotFoundException { this.properties = properties; super.activate(properties); - connectionManager = new ConnectionManager(); + connectionManager = new ConnectionManager(this); timer = new Timer(); timer.schedule(connectionManager, 30000L, 30000L); - sharedCache = new ConcurrentLRUMap(10000); // this is a default cache used where none has been provided. 
- defaultStorageManagerCache = new StorageCacheManager() { - - public Map getContentCache() { - return sharedCache; - } - - public Map getAuthorizableCache() { - return sharedCache; - } - - public Map getAccessControlCache() { - return sharedCache; - } - }; - - String jdbcDriver = (String) properties.get(JDBC_DRIVER); - Class clazz = Class.forName(jdbcDriver); + if ( LOGGER.isDebugEnabled()) { + DriverManager.setLogWriter(new PrintWriter(System.err)); + } + String jdbcDriver = StorageClientUtils.getSetting(properties.get(JDBC_DRIVER),""); + Class driverClass = this.getClass().getClassLoader().loadClass(jdbcDriver); + if ( driverClass != null ) { + LOGGER.info("Loaded Driver Class {} with classloader {} ", driverClass, driverClass.getClassLoader()); + try { + Driver d = (Driver) driverClass.newInstance(); + LOGGER.info("Created Driver Instance as {} ", d); + } catch (InstantiationException e) { + LOGGER.info("Error Creating Driver {} ", driverClass, e); + } catch (IllegalAccessException e) { + LOGGER.info("Error Creating Driver {} ", driverClass, e); + } + } else { + LOGGER.error("Failed to Load the DB Driver {}, unless the driver is available in the core bundle, it probably wont be found.", jdbcDriver); + } connectionProperties = getConnectionProperties(properties); username = StorageClientUtils.getSetting(properties.get(USERNAME), ""); password = StorageClientUtils.getSetting(properties.get(PASSWORD), ""); url = StorageClientUtils.getSetting(properties.get(CONNECTION_URL), ""); - LOGGER.info("Loaded Database Driver {} as {} ", jdbcDriver, clazz); + LOGGER.info("Loaded Database Driver {} as {} ", jdbcDriver, driverClass); + boolean registered = false; + for ( Enumeration ed = DriverManager.getDrivers(); ed.hasMoreElements();) { + registered = true; + Driver d = ed.nextElement(); + LOGGER.info("JDBC Driver Registration [{}] [{}] [{}] ", new Object[]{d, d.getClass(), d.getClass().getClassLoader()}); + } + if ( !registered ) { + LOGGER.warn("The SQL Driver has no 
drivers registered, did you ensure that that your Driver started up before this bundle ?"); + } JDBCStorageClient client = null; try { - client = (JDBCStorageClient) getClient(); + // dont use the pool, we dont want this client to be in the pool. + client = new JDBCStorageClient(this, properties, + getSqlConfig(), getIndexColumns(), getIndexColumnsTypes(), getIndexColumnsNames(), false ); + client = checkSchema(client); if (client == null) { LOGGER.warn("Failed to check Schema, no connection"); } } catch (ClientPoolException e) { LOGGER.warn("Failed to check Schema", e); + } catch (NoSuchAlgorithmException e) { + LOGGER.warn("Failed to check Schema", e); + } catch (SQLException e) { + LOGGER.warn("Failed to check Schema", e); + } catch (StorageClientException e) { + LOGGER.warn("Failed to check Schema", e); } finally { - client.close(); + if (client != null) { + // do not close as this will add the client into the pool. + client.passivate(); + client.destroy(); + } } } + + + + public Map getIndexColumnsNames() { + return indexColumnsMap; + } + + + @Override @Deactivate public void deactivate(Map properties) { super.deactivate(properties); @@ -192,8 +228,8 @@ public void deactivate(Map properties) { timer.cancel(); connectionManager.close(); - String connectionUrl = (String) this.properties.get(CONNECTION_URL); - String jdbcDriver = (String) properties.get(JDBC_DRIVER); + String connectionUrl = StorageClientUtils.getSetting(this.properties.get(CONNECTION_URL),""); + String jdbcDriver = StorageClientUtils.getSetting(properties.get(JDBC_DRIVER),""); if ("org.apache.derby.jdbc.EmbeddedDriver".equals(jdbcDriver) && connectionUrl != null) { // need to shutdown this instance. 
String[] parts = StringUtils.split(connectionUrl, ';'); @@ -234,6 +270,7 @@ protected JDBCStorageClient checkSchema(Object o) { LOGGER.info(" Database URL : {} ", properties.get(CONNECTION_URL)); client.checkSchema(getClientConfigLocations(client.getConnection())); schemaHasBeenChecked = true; + indexColumnsMap = client.syncIndexColumns(); } catch (Throwable e) { LOGGER.warn("Failed to check Schema", e); } @@ -242,11 +279,17 @@ protected JDBCStorageClient checkSchema(Object o) { return client; } + public Map getSqlConfig() { + return getSqlConfig(null); + } + public Map getSqlConfig(Connection connection) { synchronized (sqlConfigLock) { if (sqlConfig == null) { try { - + if ( connection == null ) { + connection = getConnection(); + } for (String clientSQLLocation : getClientConfigLocations(connection)) { String clientConfig = clientSQLLocation + ".sql"; InputStream in = this.getClass().getClassLoader() @@ -271,7 +314,7 @@ public Map getSqlConfig(Connection connection) { } } } catch (SQLException e) { - LOGGER.error("Failed to locate SQL configuration"); + LOGGER.error("Failed to locate SQL configuration ",e); } } } @@ -294,7 +337,10 @@ private String[] getClientConfigLocations(Connection connection) throws SQLExcep private Properties getConnectionProperties(Map config) { Properties connectionProperties = new Properties(); for (Entry e : config.entrySet()) { - connectionProperties.put(e.getKey(), e.getValue()); + // dont add the configuration object that might be in the properties while unit testing. + if ( !(e.getValue() instanceof Configuration) ) { + connectionProperties.put(e.getKey(), e.getValue()); + } } return connectionProperties; } @@ -305,13 +351,7 @@ protected PoolableObjectFactory getConnectionPoolFactory() { } public StorageCacheManager getStorageCacheManager() { - if ( storageManagerCache != null ) { - if ( sharedCache.size() > 0 ) { - sharedCache.clear(); // dump any memory consumed by the default cache. 
- } - return storageManagerCache ; - } - return defaultStorageManagerCache; + return storageManagerCache; } public Connection getConnection() throws SQLException { @@ -322,9 +362,21 @@ public Connection getConnection() throws SQLException { } else { connection = DriverManager.getConnection(url, username, password); } + connection.setAutoCommit(true); // KERN-1691 connectionManager.set(connection); } return connection; } + public void resetConnection() { + connectionManager.clean(); + } + + public String getValidationSql() { + if ( sqlConfig != null ) { + return (String) sqlConfig.get("validate"); + } + return null; + } + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/BatchInsertIndexer.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/BatchInsertIndexer.java new file mode 100644 index 00000000..3644f18d --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/BatchInsertIndexer.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Map.Entry; + +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +public class BatchInsertIndexer extends KeyValueIndexer { + + private static final Logger LOGGER = LoggerFactory.getLogger(BatchInsertIndexer.class); + + public BatchInsertIndexer(JDBCStorageClient jdbcStorageClient, Set indexColumns, Map sqlConfig) { + super(jdbcStorageClient, indexColumns, sqlConfig); + } + + public void index(Map statementCache, String keySpace, String columnFamily, String key, String rid, Map values) throws StorageClientException, SQLException { + Set removeSet = Sets.newHashSet(); + // execute the updates and add the necessary inserts. 
+ Map>> insertSequence = Maps + .newHashMap(); + + Set insertSet = Sets.newHashSet(); + + for (Entry e : values.entrySet()) { + String k = e.getKey(); + Object o = e.getValue(); + if (shouldIndex(keySpace, columnFamily, k)) { + if ( o instanceof RemoveProperty || o == null ) { + PreparedStatement removeStringColumn = client.getStatement(keySpace, + columnFamily, JDBCStorageClient.SQL_REMOVE_STRING_COLUMN, rid, statementCache); + removeStringColumn.setString(1, rid); + removeStringColumn.setString(2, k); + removeStringColumn.addBatch(); + removeSet.add(removeStringColumn); + } else { + // remove all previous values + PreparedStatement removeStringColumn = client.getStatement(keySpace, + columnFamily, JDBCStorageClient.SQL_REMOVE_STRING_COLUMN, rid, statementCache); + removeStringColumn.setString(1, rid); + removeStringColumn.setString(2, k); + removeStringColumn.addBatch(); + removeSet.add(removeStringColumn); + // insert new values, as we just removed them we know we can insert, no need to attempt update + // the only thing that we know is the colum value changes so we have to re-index the whole + // property + Object[] valueMembers = (o instanceof Object[]) ? 
(Object[]) o : new Object[] { o }; + for (Object ov : valueMembers) { + String valueMember = ov.toString(); + PreparedStatement insertStringColumn = client.getStatement(keySpace, + columnFamily, JDBCStorageClient.SQL_INSERT_STRING_COLUMN, rid, statementCache); + insertStringColumn.setString(1, valueMember); + insertStringColumn.setString(2, rid); + insertStringColumn.setString(3, k); + insertStringColumn.addBatch(); + LOGGER.debug("Insert Index {} {}", k, valueMember); + insertSet.add(insertStringColumn); + List> insertSeq = insertSequence + .get(insertStringColumn); + if (insertSeq == null) { + insertSeq = Lists.newArrayList(); + insertSequence.put(insertStringColumn, insertSeq); + } + insertSeq.add(e); + } + } + } + } + + if ( !StorageClientUtils.isRoot(key)) { + // create a holding map containing a rowhash of the parent and then process the entry to generate a update operation. + Map autoIndexMap = ImmutableMap.of(Content.PARENT_HASH_FIELD, (Object)client.rowHash(keySpace, columnFamily, StorageClientUtils.getParentObjectPath(key))); + for ( Entry e : autoIndexMap.entrySet()) { + // remove all previous values + PreparedStatement removeStringColumn = client.getStatement(keySpace, + columnFamily, JDBCStorageClient.SQL_REMOVE_STRING_COLUMN, rid, statementCache); + removeStringColumn.setString(1, rid); + removeStringColumn.setString(2, e.getKey()); + removeStringColumn.addBatch(); + removeSet.add(removeStringColumn); + PreparedStatement insertStringColumn = client.getStatement(keySpace, + columnFamily, JDBCStorageClient.SQL_INSERT_STRING_COLUMN, rid, statementCache); + insertStringColumn.setString(1, (String)e.getValue()); + insertStringColumn.setString(2, rid); + insertStringColumn.setString(3, e.getKey()); + insertStringColumn.addBatch(); + LOGGER.debug("Insert {} {}", e.getKey(), e.getValue()); + insertSet.add(insertStringColumn); + List> insertSeq = insertSequence + .get(insertStringColumn); + if (insertSeq == null) { + insertSeq = Lists.newArrayList(); + 
insertSequence.put(insertStringColumn, insertSeq); + } + insertSeq.add(e); + } + } + + LOGGER.debug("Remove set {}", removeSet); + + for (PreparedStatement pst : removeSet) { + pst.executeBatch(); + } + + LOGGER.debug("Insert set {}", insertSet); + for (PreparedStatement pst : insertSet) { + int[] res = pst.executeBatch(); + List> insertSeq = insertSequence.get(pst); + for (int i = 0; i < res.length; i++ ) { + Entry e = insertSeq.get(i); + if ( res[i] <= 0 && res[i] != -2 ) { // Oracle drivers respond with -2 on a successful insert when the number is not known http://download.oracle.com/javase/1.3/docs/guide/jdbc/spec2/jdbc2.1.frame6.html + LOGGER.warn("Index failed for {} {} ", new Object[] { rid, e.getKey(), + e.getValue() }); + + } else { + LOGGER.debug("Index inserted for {} {} ", new Object[] { rid, e.getKey(), + e.getValue() }); + + } + } + } + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/CachingIndexer.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/CachingIndexer.java new file mode 100644 index 00000000..c7e6572c --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/CachingIndexer.java @@ -0,0 +1,21 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import java.util.Map; + +/** + * Caching indexers cache result sets and have a method to allow external + * classes to invalidate cache rows. + * + * @author ieb + * + */ +public interface CachingIndexer { + + /** + * Invalidate a cache entry, based on the query properties. 
+ * @param keyspace + * @param columnFamily + * @param queryProperties + */ + void invalidate(String keyspace, String columnFamily, Map queryProperties); +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionHolder.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionHolder.java new file mode 100644 index 00000000..6bb58334 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionHolder.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +public class ConnectionHolder { + + private static final Logger LOGGER = LoggerFactory.getLogger(ConnectionHolder.class); + private static final long TTL = 3600000L; + private static final long validateWait = 120000L; + private Connection connection; + private long lastUsed; + private long lastValidated; + private BaseJDBCStorageClientPool jdbcStorageClientPool; + + public ConnectionHolder(Connection connection, BaseJDBCStorageClientPool jdbcStorageClientPool) { + this.lastUsed = System.currentTimeMillis(); + this.lastValidated = 0L; // force the connection to get validated, even if its new. + this.connection = connection; + this.jdbcStorageClientPool = jdbcStorageClientPool; + } + + public void ping() { + lastUsed = System.currentTimeMillis(); + } + + public boolean hasExpired() { + //add validity check + /* + if enough time has elapsed + run validation query + on any exception return false; + */ + long now = System.currentTimeMillis(); + + if (now > lastValidated + validateWait) { + boolean valid = false; + Statement s = null; + ResultSet rs = null; + try { + s = connection.createStatement(); + String validationSql = jdbcStorageClientPool.getValidationSql(); + if ( validationSql != null ) { + rs = s.executeQuery(validationSql); + if (rs.next()) { + valid = true; + } + } else if ( lastValidated == 0L ) { + LOGGER.warn("No Validation SQL has been set in the SQL configuration, connections may randomly fail"); + valid = true; + } + } catch (Throwable e) { + LOGGER.warn("Error running validation query", e); + } finally { + try { + rs.close(); + } catch (Throwable t) { + LOGGER.debug(t.getMessage(), t); + } + try { + s.close(); + } catch (Throwable t) { + LOGGER.debug(t.getMessage(), t); + } + } + + if (!valid) + return true; + + 
lastValidated = now; + } + + return (now > lastUsed + TTL); + } + + public Connection get() { + if (hasExpired()) return null; + + ping(); + + return connection; + } + + public void close() { + if (connection != null) { + try { + connection.close(); + } catch (SQLException e) { + LOGGER.debug("Failed to close connection " + e.getMessage(), e); + } + } + } + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionManager.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionManager.java similarity index 75% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionManager.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionManager.java index daf0f167..71cc7382 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionManager.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionManager.java @@ -25,6 +25,13 @@ public class ConnectionManager extends TimerTask { private Map threadMap = new ConcurrentHashMap(); + private boolean closing = false; + private BaseJDBCStorageClientPool jdbcStorageClientPool; + + + public ConnectionManager(BaseJDBCStorageClientPool jdbcStorageClientPool) { + this.jdbcStorageClientPool = jdbcStorageClientPool; + } @Override public void run() { @@ -32,16 +39,21 @@ public void run() { } public Connection get() { + if ( closing ) { + return null; + } Thread t = Thread.currentThread(); ConnectionHolder ch = threadMap.get(t); - if (ch != null && ch.get() != null) { - ch.ping(); + if (ch != null) { return ch.get(); } return null; } public void set(Connection connection) { + if ( closing ) { + throw new IllegalStateException("ConnectionManager is closing "); + } cleanThreadMap(); Thread t = Thread.currentThread(); ConnectionHolder c = threadMap.get(t); @@ -49,11 +61,24 @@ public void set(Connection connection) { c.close(); threadMap.remove(t); } - ConnectionHolder ch = new 
ConnectionHolder(connection); + ConnectionHolder ch = new ConnectionHolder(connection, jdbcStorageClientPool); threadMap.put(t, ch); } + public void clean() { + Thread t = Thread.currentThread(); + threadMap.get(t); + ConnectionHolder c = threadMap.get(t); + if (c != null) { + c.close(); + threadMap.remove(t); + } + } + private void cleanThreadMap() { + if ( closing ) { + return; + } Thread[] copy = threadMap.keySet().toArray(new Thread[threadMap.size()]); for (Thread t : copy) { if (!t.isAlive()) { @@ -75,7 +100,9 @@ private void cleanThreadMap() { } public void close() { + closing = true; while (threadMap.size() > 0) { + Thread[] copy = threadMap.keySet().toArray(new Thread[threadMap.size()]); for (Thread t : copy) { ConnectionHolder ch = threadMap.remove(t); diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/CounterContext.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/CounterContext.java new file mode 100644 index 00000000..fbe98971 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/CounterContext.java @@ -0,0 +1,9 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import java.util.concurrent.atomic.AtomicInteger; + +public interface CounterContext { + + AtomicInteger get(String key); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/Indexer.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/Indexer.java new file mode 100644 index 00000000..97a954d6 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/Indexer.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; + +public interface Indexer { + + void index(Map statementCache, + String keySpace, String columnFamily, String key, String rid, Map values) + throws StorageClientException, SQLException; + + DisposableIterator> find(String keySpace, String columnFamily, + Map properties, DirectCacheAccess cachingManager) throws StorageClientException; + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClient.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClient.java new file mode 100644 index 00000000..ef6f1286 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClient.java @@ -0,0 +1,1275 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.UTFDataFormatException; +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.text.MessageFormat; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.commons.lang.StringUtils; +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.DataFormatException; +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.StorageCacheManager; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.StorageConstants; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import 
org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.sakaiproject.nakamura.lite.storage.spi.Disposable; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.Disposer; +import org.sakaiproject.nakamura.lite.storage.spi.RowHasher; +import org.sakaiproject.nakamura.lite.storage.spi.SparseMapRow; +import org.sakaiproject.nakamura.lite.storage.spi.SparseRow; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientListener; +import org.sakaiproject.nakamura.lite.storage.spi.content.FileStreamContentHelper; +import org.sakaiproject.nakamura.lite.storage.spi.content.StreamedContentHelper; +import org.sakaiproject.nakamura.lite.storage.spi.types.Types; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +public class JDBCStorageClient implements StorageClient, RowHasher, Disposer { + + + + + + public class SlowQueryLogger { + // only used to define the logger. 
+ } + + private static final String INVALID_DATA_ERROR = "Data invalid for storage."; + private static final Logger LOGGER = LoggerFactory.getLogger(JDBCStorageClient.class); + static final Logger SQL_LOGGER = LoggerFactory.getLogger(SlowQueryLogger.class); + private static final String SQL_VALIDATE = "validate"; + private static final String SQL_CHECKSCHEMA = "check-schema"; + private static final String SQL_NAME_PADDING = "sql-name-padding"; + private static final String SQL_MAX_NAME_LENGTH = "sql-max-name-length"; + private static final String SQL_COMMENT = "#"; + private static final String SQL_EOL = ";"; + public static final String SQL_INDEX_COLUMN_NAME_SELECT = "index-column-name-select"; + private static final String SQL_INDEX_COLUMN_NAME_INSERT = "index-column-name-insert"; + static final String SQL_DELETE_STRING_ROW = "delete-string-row"; + static final String SQL_INSERT_STRING_COLUMN = "insert-string-column"; + static final String SQL_REMOVE_STRING_COLUMN = "remove-string-column"; + + static final String SQL_BLOCK_DELETE_ROW = "block-delete-row"; + static final String SQL_BLOCK_SELECT_ROW = "block-select-row"; + static final String SQL_BLOCK_INSERT_ROW = "block-insert-row"; + static final String SQL_BLOCK_UPDATE_ROW = "block-update-row"; + + private static final String PROP_HASH_ALG = "rowid-hash"; + private static final String USE_BATCH_INSERTS = "use-batch-inserts"; + private static final String JDBC_SUPPORT_LEVEL = "jdbc-support-level"; + private static final String SQL_STATEMENT_SEQUENCE = "sql-statement-sequence"; + private static final String UPDATE_FIRST_SEQUENCE = "updateFirst"; + private static final Object SLOW_QUERY_THRESHOLD = "slow-query-time"; + private static final Object VERY_SLOW_QUERY_THRESHOLD = "very-slow-query-time"; + /** + * A set of columns that are indexed to allow operations within the driver. 
+ */ + static final Set AUTO_INDEX_COLUMNS_TYPES = ImmutableSet.of( + "cn:_:parenthash=String", + "au:_:parenthash=String", + "ac:_:parenthash=String"); + static final Set AUTO_INDEX_COLUMNS = ImmutableSet.of( + "cn:_:parenthash", + "au:_:parenthash", + "ac:_:parenthash"); + private static final Map COLUMN_NAME_MAPPING = ImmutableMap.of("_:parenthash","parenthash"); + + private BaseJDBCStorageClientPool jdbcStorageClientConnection; + private Map sqlConfig; + private boolean active; + private StreamedContentHelper streamedContentHelper; + private List toDispose = Lists.newArrayList(); + private Exception destroyed; + private Exception passivate; + private String rowidHash; + private Map counters = Maps.newConcurrentMap(); + private Set indexColumns; + private Indexer indexer; + private long slowQueryThreshold; + private long verySlowQueryThreshold; + private Object desponseLock = new Object(); + private StorageClientListener storageClientListener; + private boolean sqlNamePadding; + private int maxNameLength; + + public JDBCStorageClient(BaseJDBCStorageClientPool jdbcStorageClientConnectionPool, + Map properties, Map sqlConfig, Set indexColumns, Set indexColumnTypes, Map indexColumnsNames, boolean enforceWideColums) throws SQLException, + NoSuchAlgorithmException, StorageClientException { + if ( jdbcStorageClientConnectionPool == null ) { + throw new StorageClientException("Null Connection Pool, cant create Client"); + } + if ( properties == null ) { + throw new StorageClientException("Null Connection Properties, cant create Client"); + } + if ( sqlConfig == null ) { + throw new StorageClientException("Null SQL COnfiguration, cant create Client"); + } + if ( indexColumns == null ) { + throw new StorageClientException("Null Index Colums, cant create Client"); + } + this.jdbcStorageClientConnection = jdbcStorageClientConnectionPool; + streamedContentHelper = new FileStreamContentHelper(this, properties); + + this.sqlConfig = sqlConfig; + this.indexColumns = 
indexColumns; + rowidHash = getSql(PROP_HASH_ALG); + if (rowidHash == null) { + rowidHash = "MD5"; + } + this.sqlNamePadding = Boolean.parseBoolean(StorageClientUtils.getSetting(getSql(SQL_NAME_PADDING),"false")); + this.maxNameLength = Integer.parseInt(StorageClientUtils.getSetting(getSql(SQL_MAX_NAME_LENGTH),"50")); + active = true; + if ( indexColumnsNames != null ) { + LOGGER.debug("Using Wide Columns" ); + indexer = new WideColumnIndexer(this,indexColumnsNames, indexColumnTypes, sqlConfig); + } else if ("1".equals(getSql(USE_BATCH_INSERTS))) { + if ( enforceWideColums ) { + LOGGER.warn("Batch Narrow Column Indexes are deprecated as of 1.5, please check your database and/or configuration, support will be removed in future releases" ); + } + indexer = new BatchInsertIndexer(this, indexColumns, sqlConfig); + } else { + if ( enforceWideColums ) { + LOGGER.warn("Narrow Column Indexes are deprecated as of 1.5, please check your database and/or configuration, support will be removed in future releases" ); + } + indexer = new NonBatchInsertIndexer(this, indexColumns, sqlConfig); + } + + slowQueryThreshold = 50L; + verySlowQueryThreshold = 100L; + if (sqlConfig.containsKey(SLOW_QUERY_THRESHOLD)) { + slowQueryThreshold = Long.parseLong((String)sqlConfig.get(SLOW_QUERY_THRESHOLD)); + } + if (sqlConfig.containsKey(VERY_SLOW_QUERY_THRESHOLD)) { + verySlowQueryThreshold = Long.parseLong((String)sqlConfig.get(VERY_SLOW_QUERY_THRESHOLD)); + } + + } + + public Map get(String keySpace, String columnFamily, String key) + throws StorageClientException { + checkActive(); + String rid = rowHash(keySpace, columnFamily, key); + return internalGet(keySpace, columnFamily, rid, null); // gets through this route should have already consulted the cache. 
+ } + Map internalGet(String keySpace, String columnFamily, String rid, DirectCacheAccess cachingManager) throws StorageClientException { + if ( cachingManager != null ) { + CacheHolder ch = cachingManager.getFromCache(rid); + if ( ch != null ) { + Map cached = ch.get(); + if ( cached == null ) { + // the cache was an empty object, we respond with empty. + cached = Maps.newHashMap(); + } + return cached; + } + } + ResultSet body = null; + Map result = Maps.newHashMap(); + PreparedStatement selectStringRow = null; + try { + boolean hasRetried = false; + for (;;) { + try { + selectStringRow = getStatement(keySpace, columnFamily, SQL_BLOCK_SELECT_ROW, rid, null); + inc("A"); + selectStringRow.clearWarnings(); + selectStringRow.clearParameters(); + selectStringRow.setString(1, rid); + long t1 = System.currentTimeMillis(); + body = selectStringRow.executeQuery(); + inc("B"); + if (body.next()) { + Types.loadFromStream(rid, result, body.getBinaryStream(1), columnFamily); + } + break; + } catch (SQLException ex) { + if (!hasRetried) { + resetConnection(null); + hasRetried = true; + } else { + throw ex; + } + + } + } + } catch (SQLException e) { + LOGGER.warn("Failed to perform get operation on " + keySpace + ":" + columnFamily + + ":" + rid, e); + if (passivate != null) { + LOGGER.warn("Was Pasivated ", passivate); + } + if (destroyed != null) { + LOGGER.warn("Was Destroyed ", destroyed); + } + throw new StorageClientException(e.getMessage(), e); + } catch (IOException e) { + LOGGER.warn("Failed to perform get operation on " + keySpace + ":" + columnFamily + + ":" + rid, e); + if (passivate != null) { + LOGGER.warn("Was Pasivated ", passivate); + } + if (destroyed != null) { + LOGGER.warn("Was Destroyed ", destroyed); + } + throw new StorageClientException(e.getMessage(), e); + } finally { + close(body, "B"); + close(selectStringRow, "A"); + } + if ( cachingManager != null ) { + cachingManager.putToCache(rid, new CacheHolder(result),true); + } + return result; + } + + 
public String rowHash(String keySpace, String columnFamily, String key) + throws StorageClientException { + MessageDigest hasher; + try { + hasher = MessageDigest.getInstance(rowidHash); + } catch (NoSuchAlgorithmException e1) { + throw new StorageClientException("Unable to get hash algorithm " + e1.getMessage(), e1); + } + String keystring = keySpace + ":" + columnFamily + ":" + key; + byte[] ridkey; + try { + ridkey = keystring.getBytes("UTF8"); + } catch (UnsupportedEncodingException e) { + ridkey = keystring.getBytes(); + } + return StorageClientUtils.encode(hasher.digest(ridkey)); + } + + public void insert(String keySpace, String columnFamily, String key, Map values, boolean probablyNew) + throws StorageClientException { + checkActive(); + + Map statementCache = Maps.newHashMap(); + boolean autoCommit = true; + try { + autoCommit = startBlock(); + String rid = rowHash(keySpace, columnFamily, key); + for (Entry e : values.entrySet()) { + String k = e.getKey(); + Object o = e.getValue(); + if (o instanceof byte[]) { + throw new RuntimeException("Invalid content in " + k + + ", storing byte[] rather than streaming it"); + } + } + + Map m = get(keySpace, columnFamily, key); + if ( storageClientListener != null ) { + storageClientListener.before(keySpace,columnFamily,key,m); + } + if ( TRUE.equals(m.get(DELETED_FIELD)) ) { + // if the map was previously deleted, delete all content since we don't want the old map becoming part of the new map. 
+ m.clear(); + } + for (Entry e : values.entrySet()) { + String k = e.getKey(); + Object o = e.getValue(); + + if (o instanceof RemoveProperty || o == null) { + m.remove(k); + } else { + m.put(k, o); + } + } + if ( storageClientListener != null ) { + storageClientListener.after(keySpace,columnFamily,key,m); + } + LOGGER.debug("Saving {} {} {} ", new Object[]{key, rid, m}); + if ( probablyNew && !UPDATE_FIRST_SEQUENCE.equals(getSql(SQL_STATEMENT_SEQUENCE))) { + PreparedStatement insertBlockRow = getStatement(keySpace, columnFamily, + SQL_BLOCK_INSERT_ROW, rid, statementCache); + insertBlockRow.clearWarnings(); + insertBlockRow.clearParameters(); + insertBlockRow.setString(1, rid); + InputStream insertStream = null; + try { + insertStream = Types.storeMapToStream(rid, m, columnFamily); + } catch (UTFDataFormatException e) { + throw new DataFormatException(INVALID_DATA_ERROR, e); + } + if ("1.5".equals(getSql(JDBC_SUPPORT_LEVEL))) { + insertBlockRow.setBinaryStream(2, insertStream, insertStream.available()); + } else { + insertBlockRow.setBinaryStream(2, insertStream); + } + int rowsInserted = 0; + try { + long t1 = System.currentTimeMillis(); + rowsInserted = insertBlockRow.executeUpdate(); + checkSlow(t1, getSql(keySpace, columnFamily,SQL_BLOCK_INSERT_ROW)); + } catch ( SQLException e ) { + LOGGER.debug(e.getMessage(),e); + } + if ( rowsInserted == 0 ) { + PreparedStatement updateBlockRow = getStatement(keySpace, columnFamily, + SQL_BLOCK_UPDATE_ROW, rid, statementCache); + updateBlockRow.clearWarnings(); + updateBlockRow.clearParameters(); + updateBlockRow.setString(2, rid); + try { + insertStream = Types.storeMapToStream(rid, m, columnFamily); + } catch (UTFDataFormatException e) { + throw new DataFormatException(INVALID_DATA_ERROR, e); + } + if ("1.5".equals(getSql(JDBC_SUPPORT_LEVEL))) { + updateBlockRow.setBinaryStream(1, insertStream, insertStream.available()); + } else { + updateBlockRow.setBinaryStream(1, insertStream); + } + long t = 
System.currentTimeMillis(); + int u = updateBlockRow.executeUpdate(); + checkSlow(t, getSql(keySpace, columnFamily, SQL_BLOCK_UPDATE_ROW)); + if( u == 0) { + throw new StorageClientException("Failed to save " + rid); + } else { + LOGGER.debug("Updated {} ", rid); + } + } else { + LOGGER.debug("Inserted {} ", rid); + } + } else { + PreparedStatement updateBlockRow = getStatement(keySpace, columnFamily, + SQL_BLOCK_UPDATE_ROW, rid, statementCache); + updateBlockRow.clearWarnings(); + updateBlockRow.clearParameters(); + updateBlockRow.setString(2, rid); + InputStream updateStream = null; + try { + updateStream = Types.storeMapToStream(rid, m, columnFamily); + } catch (UTFDataFormatException e) { + throw new DataFormatException(INVALID_DATA_ERROR, e); + } + if ("1.5".equals(getSql(JDBC_SUPPORT_LEVEL))) { + updateBlockRow.setBinaryStream(1, updateStream, updateStream.available()); + } else { + updateBlockRow.setBinaryStream(1, updateStream); + } + long t = System.currentTimeMillis(); + int u = updateBlockRow.executeUpdate(); + checkSlow(t, getSql(keySpace, columnFamily, SQL_BLOCK_UPDATE_ROW)); + if (u == 0) { + PreparedStatement insertBlockRow = getStatement(keySpace, columnFamily, + SQL_BLOCK_INSERT_ROW, rid, statementCache); + insertBlockRow.clearWarnings(); + insertBlockRow.clearParameters(); + insertBlockRow.setString(1, rid); + try { + updateStream = Types.storeMapToStream(rid, m, columnFamily); + } catch (UTFDataFormatException e) { + throw new DataFormatException(INVALID_DATA_ERROR, e); + } + if ("1.5".equals(getSql(JDBC_SUPPORT_LEVEL))) { + insertBlockRow.setBinaryStream(2, updateStream, updateStream.available()); + } else { + insertBlockRow.setBinaryStream(2, updateStream); + } + t = System.currentTimeMillis(); + u = insertBlockRow.executeUpdate(); + checkSlow(t, getSql(keySpace, columnFamily, SQL_BLOCK_INSERT_ROW)); + + if (u == 0) { + throw new StorageClientException("Failed to save " + rid); + } else { + LOGGER.debug("Inserted {} ", rid); + } + } else { + 
LOGGER.debug("Updated {} ", rid); + } + } + + // Indexing --------------------------------------------------------------------------- + indexer.index(statementCache, keySpace, columnFamily, key, rid, values); + + endBlock(autoCommit); + } catch (SQLException e) { + abandonBlock(autoCommit); + resetConnection(statementCache); + LOGGER.warn("Failed to perform insert/update operation on {}:{}:{} ", new Object[] { + keySpace, columnFamily, key }, e); + throw new StorageClientException(e.getMessage(), e); + } catch (IOException e) { + abandonBlock(autoCommit); + LOGGER.warn("Failed to perform insert/update operation on {}:{}:{} ", new Object[] { + keySpace, columnFamily, key }, e); + throw new StorageClientException(e.getMessage(), e); + } finally { + closeStatementCache(statementCache); + } + } + + private void checkSlow(long t, String sql) { + t = System.currentTimeMillis() - t; + if ( t > 100 ) { + SQL_LOGGER.info("Slow Query {} {} ",t, sql); + } + } + + String getSql(String keySpace, String columnFamily, String name) { + return getSql(new String[]{ + name+"."+keySpace+"."+columnFamily, + name+"."+keySpace, + name + }); + } + + private void abandonBlock(boolean autoCommit) { + if (autoCommit) { + try { + Connection connection = jdbcStorageClientConnection.getConnection(); + connection.rollback(); + connection.setAutoCommit(autoCommit); + if ( storageClientListener != null ) { + storageClientListener.rollback(); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + } + } + + private void endBlock(boolean autoCommit) throws SQLException { + if (autoCommit) { + Connection connection = jdbcStorageClientConnection.getConnection(); + connection.commit(); + connection.setAutoCommit(autoCommit); + if ( storageClientListener != null ) { + storageClientListener.commit(); + } + } + } + + private boolean startBlock() throws SQLException { + Connection connection = jdbcStorageClientConnection.getConnection(); + boolean autoCommit = connection.getAutoCommit(); + 
        connection.setAutoCommit(false);
        if ( storageClientListener != null ) {
            storageClientListener.begin();
        }
        return autoCommit;
    }

    /**
     * Build a human-readable row identifier for log/debug messages (this is
     * NOT the hashed row id used for storage — see rowHash).
     */
    String getDebugRowId(String keySpace, String columnFamily, String key) {
        return keySpace + ":" + columnFamily + ":" + key;
    }

    /**
     * Delete a row: removes both the string-row entry and the block-row body,
     * inside a single transaction block. Index entries are not touched here —
     * presumably handled elsewhere; TODO confirm.
     *
     * @throws StorageClientException if the client is not active or the
     *         delete fails (the transaction is rolled back first).
     */
    public void remove(String keySpace, String columnFamily, String key)
            throws StorageClientException {
        checkActive();
        PreparedStatement deleteStringRow = null;
        PreparedStatement deleteBlockRow = null;
        String rid = rowHash(keySpace, columnFamily, key);
        boolean autoCommit = false;
        try {
            autoCommit = startBlock();
            if ( storageClientListener != null ) {
                storageClientListener.delete(keySpace, columnFamily, key);
            }
            deleteStringRow = getStatement(keySpace, columnFamily, SQL_DELETE_STRING_ROW, rid, null);
            inc("deleteStringRow");
            deleteStringRow.clearWarnings();
            deleteStringRow.clearParameters();
            deleteStringRow.setString(1, rid);
            deleteStringRow.executeUpdate();

            deleteBlockRow = getStatement(keySpace, columnFamily, SQL_BLOCK_DELETE_ROW, rid, null);
            inc("deleteBlockRow");
            deleteBlockRow.clearWarnings();
            deleteBlockRow.clearParameters();
            deleteBlockRow.setString(1, rid);
            deleteBlockRow.executeUpdate();
            endBlock(autoCommit);
        } catch (SQLException e) {
            abandonBlock(autoCommit);
            resetConnection(null);
            LOGGER.warn("Failed to perform delete operation on {}:{}:{} ", new Object[] { keySpace,
                    columnFamily, key }, e);
            throw new StorageClientException(e.getMessage(), e);
        } finally {
            close(deleteStringRow, "deleteStringRow");
            close(deleteBlockRow, "deleteBlockRow");
        }
    }

    /**
     * Release this client back to the pool after passivating it; the client
     * must not be used afterwards.
     */
    public void close() {
        passivate();
        jdbcStorageClientConnection.releaseClient(this);
    }

    /**
     * Mark this client as destroyed, recording a traceback of the destroy
     * location so later misuse can be diagnosed (see checkActive). Idempotent.
     */
    public void destroy() {
        if (destroyed == null) {
            try {
                destroyed = new Exception("Connection Closed Traceback");
            } catch (Throwable t) {
                // Constructing an Exception should never fail; this is purely
                // defensive.
                LOGGER.error("Failed to dispose connection ", t);
            }
        }
    }

    // Verify the client is active (not passivated, not destroyed); throws
    // StorageClientException with the recorded traceback otherwise.
    private void checkActive() throws StorageClientException {
checkActive(true); + } + + private void checkActive(boolean checkForActive) throws StorageClientException { + if (destroyed != null) { + LOGGER.warn("Using a disposed storage client "); + throw new StorageClientException( + "Client was destroyed, traceback of destroy location follows ", destroyed); + } + if ( checkForActive ) { + if ( passivate != null ) { + LOGGER.warn("Using a passive storage client"); + throw new StorageClientException( + "Client has been passivated traceback of passivate location follows ", passivate); + } + if ( ! active ) { + LOGGER.warn("Using a passive storage client, no passivate location"); + throw new StorageClientException( + "Client has been passivated"); + + } + } + } + + /** + * Get a prepared statement, potentially optimized and sharded. + * + * @param keySpace + * @param columnFamily + * @param sqlSelectStringRow + * @param rid + * @param statementCache + * @return + * @throws SQLException + */ + PreparedStatement getStatement(String keySpace, String columnFamily, + String sqlSelectStringRow, String rid, Map statementCache) + throws SQLException { + String shard = rid.substring(0, 1); + String[] keys = new String[] { + sqlSelectStringRow + "." + keySpace + "." + columnFamily + "._" + shard, + sqlSelectStringRow + "." + columnFamily + "._" + shard, + sqlSelectStringRow + "." + keySpace + "._" + shard, + sqlSelectStringRow + "._" + shard, + sqlSelectStringRow + "." + keySpace + "." + columnFamily, + sqlSelectStringRow + "." + columnFamily, sqlSelectStringRow + "." 
                + keySpace,
                sqlSelectStringRow };
        // Use the first (most specific) key that has configured SQL; cache the
        // prepared statement when a cache was supplied.
        for (String k : keys) {
            if (sqlConfig.containsKey(k)) {
                LOGGER.debug("Using Statement {} ",sqlConfig.get(k));
                if (statementCache != null && statementCache.containsKey(k)) {
                    return statementCache.get(k);
                } else {

                    PreparedStatement pst = jdbcStorageClientConnection.getConnection()
                            .prepareStatement((String) sqlConfig.get(k));
                    if (statementCache != null) {
                        inc("cachedStatement");
                        statementCache.put(k, pst);
                    }
                    return pst;
                }
            }
        }
        return null;
    }

    /**
     * Prepare (or fetch from the supplied cache) a statement for a literal SQL
     * string. Cached statements are closed by closeStatementCache(); uncached
     * ones are the caller's responsibility.
     */
    PreparedStatement getStatement(String sql, Map statementCache) throws SQLException {
        PreparedStatement pst = null;
        if ( statementCache != null ) {
            if ( statementCache.containsKey(sql)) {
                pst = statementCache.get(sql);
            } else {
                pst = jdbcStorageClientConnection.getConnection().prepareStatement(sql);
                inc("cachedStatement");
                statementCache.put(sql, pst);
            }
        } else {
            pst = jdbcStorageClientConnection.getConnection().prepareStatement(sql);
        }
        return pst;
    }


    /**
     * Close and clear all registered disposables (e.g. streams handed out by
     * streamBodyOut), swapping in a fresh list under the lock so disposal runs
     * outside the critical section.
     */
    private void disposeDisposables() {
        List dList = null;
        // this should not be necessary, but just in case.
        synchronized (desponseLock ) {
            dList = toDispose;
            toDispose = Lists.newArrayList();
        }
        for (Disposable d : dList) {
            d.close();
        }
        dList.clear();
    }

    /**
     * Remove a disposable from the tracking list (called when it has been
     * closed independently of this client).
     */
    public void unregisterDisposable(Disposable disposable) {
        synchronized (desponseLock) {
            toDispose.remove(disposable);
        }
    }

    /**
     * Track a disposable so it is closed when this client is passivated, and
     * wire this client in as its disposer.
     * <p>
     * NOTE(review): the generic declaration appears stripped in this rendering
     * (presumably {@code <T extends Disposable>}) — verify against the
     * original source.
     */
    public T registerDisposable(T disposable) {
        // this should not be necessary, but just in case some one is sharing the client between threads.
        synchronized (desponseLock) {
            toDispose.add(disposable);
            disposable.setDisposer(this);
        }
        return disposable;
    }

    /**
     * Check that the underlying connection is usable (pool validation hook).
     * Does not require the client to be active, only not destroyed.
     */
    public boolean validate() throws StorageClientException {
        checkActive(false);
        Statement statement = null;
        try {
            // just get a connection, that will be enough to validate.
            // this is not a perfect solution. A better solution would be to handle the failure in the client code on update.
+ statement = jdbcStorageClientConnection.getConnection().createStatement(); + return true; + } catch (SQLException e) { + LOGGER.warn("Failed to validate connection ", e); + return false; + } finally { + try { + statement.close(); + } catch (Throwable e) { + LOGGER.debug("Failed to close statement in validate ", e); + } + } + } + + String getSql(String[] keys) { + for (String statementKey : keys) { + String sql = getSql(statementKey); + if (sql != null) { + return sql; + } + } + return null; + } + + private String getSql(String statementName) { + return (String) sqlConfig.get(statementName); + } + + public void checkSchema(String[] clientConfigLocations) throws ClientPoolException, + StorageClientException { + checkActive(); + Statement statement = null; + try { + + statement = jdbcStorageClientConnection.getConnection().createStatement(); + try { + statement.execute(getSql(SQL_CHECKSCHEMA)); + inc("schema"); + LOGGER.info("Schema Exists"); + return; + } catch (SQLException e) { + LOGGER.info("Schema does not exist {}", e.getMessage()); + } + + for (String clientSQLLocation : clientConfigLocations) { + String clientDDL = clientSQLLocation + ".ddl"; + InputStream in = this.getClass().getClassLoader().getResourceAsStream(clientDDL); + if (in != null) { + try { + BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF8")); + int lineNo = 1; + String line = br.readLine(); + StringBuilder sqlStatement = new StringBuilder(); + while (line != null) { + line = StringUtils.stripEnd(line, null); + if (!line.isEmpty()) { + if (line.startsWith(SQL_COMMENT)) { + LOGGER.info("Comment {} ", line); + } else if (line.endsWith(SQL_EOL)) { + sqlStatement.append(line.substring(0, line.length() - 1)); + String ddl = sqlStatement.toString(); + try { + statement.executeUpdate(ddl); + LOGGER.info("SQL OK {}:{} {} ", new Object[] { + clientDDL, lineNo, ddl }); + } catch (SQLException e) { + LOGGER.warn("SQL ERROR {}:{} {} {} ", new Object[] { + clientDDL, lineNo, ddl, 
e.getMessage() }); + } + sqlStatement = new StringBuilder(); + } else { + sqlStatement.append(line); + } + } + line = br.readLine(); + lineNo++; + } + br.close(); + LOGGER.info("Schema Created from {} ", clientDDL); + + break; + } catch (Throwable e) { + LOGGER.error("Failed to load Schema from {}", clientDDL, e); + } finally { + try { + in.close(); + } catch (IOException e) { + LOGGER.error("Failed to close stream from {}", clientDDL, e); + } + + } + } else { + LOGGER.info("No Schema found at {} ", clientDDL); + } + + } + + } catch (SQLException e) { + LOGGER.info("Failed to create schema ", e); + throw new ClientPoolException("Failed to create schema ", e); + } finally { + try { + statement.close(); + dec("schema"); + } catch (Throwable e) { + LOGGER.debug("Failed to close statement in validate ", e); + } + } + + + } + + public void activate() { + passivate = null; + active = true; + } + + public void passivate() { + if (active) { + passivate = new Exception("Passivate Traceback"); + disposeDisposables(); + active = false; + } + } + + public Map streamBodyIn(String keySpace, String columnFamily, String contentId, + String contentBlockId, String streamId, Map content, InputStream in) + throws StorageClientException, AccessDeniedException, IOException { + checkActive(); + return streamedContentHelper.writeBody(keySpace, columnFamily, contentId, contentBlockId, + streamId, content, in); + } + + public InputStream streamBodyOut(String keySpace, String columnFamily, String contentId, + String contentBlockId, String streamId, Map content) + throws StorageClientException, AccessDeniedException, IOException { + checkActive(); + final InputStream in = streamedContentHelper.readBody(keySpace, columnFamily, + contentBlockId, streamId, content); + if ( in != null ) { + registerDisposable(new StreamDisposable(in)); + } + return in; + } + + public boolean hasBody(Map content, String streamId) { + return streamedContentHelper.hasStream(content, streamId); + } + + protected 
Connection getConnection() throws StorageClientException, SQLException { + checkActive(); + return jdbcStorageClientConnection.getConnection(); + } + + public DisposableIterator> listChildren(String keySpace, String columnFamily, String key, DirectCacheAccess cachingManager) throws StorageClientException { + // this will load all child object directly. + String hash = rowHash(keySpace, columnFamily, key); + LOGGER.debug("Finding {}:{}:{} as {} ",new Object[]{keySpace,columnFamily, key, hash}); + return find(keySpace, columnFamily, ImmutableMap.of(Content.PARENT_HASH_FIELD, (Object)hash, StorageConstants.CUSTOM_STATEMENT_SET, "listchildren", StorageConstants.CACHEABLE, true), cachingManager); + } + + public DisposableIterator> find(final String keySpace, final String columnFamily, + Map properties, DirectCacheAccess cachingManager) throws StorageClientException { + checkActive(); + return indexer.find(keySpace, columnFamily, properties, cachingManager); + + + } + + + public DisposableIterator listAll(String keySpace, final String columnFamily) throws StorageClientException { + String[] keys = new String[] { "list-all." + keySpace + "." + columnFamily, + "list-all." 
+ columnFamily, "list-all" }; + String sql = null; + for (String statementKey : keys) { + sql = getSql(statementKey); + if (sql != null) { + break; + } + } + if ( sql == null ) { + throw new StorageClientException("Cant find sql statement for one of "+Arrays.toString(keys)); + } + PreparedStatement tpst = null; + ResultSet trs = null; + try { + LOGGER.debug("Preparing {} ", sql); + tpst = jdbcStorageClientConnection.getConnection().prepareStatement(sql); + inc("iterator"); + tpst.clearParameters(); + + long qtime = System.currentTimeMillis(); + trs = tpst.executeQuery(); + qtime = System.currentTimeMillis() - qtime; + if ( qtime > slowQueryThreshold && qtime < verySlowQueryThreshold) { + SQL_LOGGER.warn("Slow Query {}ms {} params:[{}]",new Object[]{qtime,sql}); + } else if ( qtime > verySlowQueryThreshold ) { + SQL_LOGGER.error("Very Slow Query {}ms {} params:[{}]",new Object[]{qtime,sql}); + } + inc("iterator r"); + LOGGER.debug("Executed "); + + // pass control to the iterator. + final PreparedStatement pst = tpst; + final ResultSet rs = trs; + tpst = null; + trs = null; + return registerDisposable(new PreemptiveIterator() { + + private SparseRow nextValue = null; + private boolean open = true; + + @Override + protected SparseRow internalNext() { + return nextValue; + } + + @Override + protected boolean internalHasNext() { + try { + while (open && rs.next()) { + try { + Map values = Maps.newHashMap(); + String rid = rs.getString(1); + Types.loadFromStream(rid, values, rs.getBinaryStream(2), columnFamily); + nextValue = new SparseMapRow(rid,values); + return true; + } catch (IOException e) { + LOGGER.error(e.getMessage(),e); + nextValue = null; + } + } + close(); + nextValue = null; + LOGGER.debug("End of Set "); + return false; + } catch (SQLException e) { + LOGGER.error(e.getMessage(), e); + close(); + nextValue = null; + return false; + } + } + + @Override + public void close() { + if (open) { + open = false; + try { + if (rs != null) { + rs.close(); + 
dec("iterator r"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + try { + if (pst != null) { + pst.close(); + dec("iterator"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + super.close(); + } + + } + }); + } catch (SQLException e) { + resetConnection(null); + LOGGER.error(e.getMessage(), e); + throw new StorageClientException(e.getMessage() + " SQL Statement was " + sql, + e); + } finally { + // trs and tpst will only be non null if control has not been passed + // to the iterator. + try { + if (trs != null) { + trs.close(); + dec("iterator r"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + try { + if (tpst != null) { + tpst.close(); + dec("iterator"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + } + + } + + void dec(String key) { + AtomicInteger cn = counters.get(key); + if (cn == null) { + LOGGER.warn("Never Statement/ResultSet Created Counter {} ", key); + } else { + cn.decrementAndGet(); + } + } + + void inc(String key) { + AtomicInteger cn = counters.get(key); + if (cn == null) { + cn = new AtomicInteger(); + counters.put(key, cn); + } + int c = cn.incrementAndGet(); + if (c > 10) { + LOGGER.warn( + "Counter {} Leaking {}, please investigate. This will eventually cause an OOM Error. 
", + key, c); + } + } + + private void close(ResultSet rs, String name) { + try { + if (rs != null) { + rs.close(); + dec(name); + } + } catch (Throwable e) { + LOGGER.debug("Failed to close result set, ok to ignore this message ", e); + } + } + + private void close(PreparedStatement pst, String name) { + try { + if (pst != null) { + pst.close(); + dec(name); + } + } catch (Throwable e) { + LOGGER.debug("Failed to close prepared set, ok to ignore this message ", e); + } + } + + void resetConnection(Map statementCache) { + if ( statementCache != null ) { + closeStatementCache(statementCache); + } + jdbcStorageClientConnection.resetConnection(); + } + + public void closeStatementCache(Map statementCache) { + for (PreparedStatement pst : statementCache.values()) { + if (pst != null) { + try { + pst.close(); + dec("cachedStatement"); + } catch (SQLException e) { + LOGGER.debug(e.getMessage(), e); + } + } + } + } + + public Map syncIndexColumns() throws StorageClientException, SQLException { + checkActive(); + String selectColumns = getSql(SQL_INDEX_COLUMN_NAME_SELECT); + String insertColumns = getSql(SQL_INDEX_COLUMN_NAME_INSERT); + String updateTable = getSql("alter-widestring-table"); + String updateIndexes = getSql("index-widestring-table"); + if ( selectColumns == null || insertColumns == null ) { + LOGGER.warn("Using Key Value Pair Tables for indexing "); + LOGGER.warn(" This will cause scalability problems eventually, please see KERN-1957 "); + LOGGER.warn(" To fix, port your SQL Configuration file to use a wide index table. "); + return null; // no wide column support in this JDBC config. 
+ } + PreparedStatement selectColumnsPst = null; + PreparedStatement insertColumnsPst = null; + ResultSet rs = null; + Connection connection = jdbcStorageClientConnection.getConnection(); + Statement statement = null; + try { + selectColumnsPst = connection.prepareStatement(selectColumns); + insertColumnsPst = connection.prepareStatement(insertColumns); + statement = connection.createStatement(); + rs = selectColumnsPst.executeQuery(); + Map cnames = Maps.newHashMap(); + Set usedColumns = Sets.newHashSet(); + while(rs.next()) { + String columnFamily = rs.getString(1); + String column = rs.getString(2); + String columnName = rs.getString(3); + cnames.put(columnFamily+":"+column, columnName); + usedColumns.add(columnFamily+":"+columnName); + } + // maxCols contiains the max col number for each cf. + // cnames contains a map of column Families each containing a map of columns with numbers. + for (String k : Sets.union(indexColumns, AUTO_INDEX_COLUMNS)) { + String[] cf = StringUtils.split(k,":",2); + if ( !cnames.containsKey(k) ) { + String cv = makeNameSafeSQL(cf[1], sqlNamePadding, maxNameLength); + if ( usedColumns.contains(cf[0]+":"+cv)) { + LOGGER.info( + "Column already exists, please provide explicit mapping indexing {} already used column {} ", + k, cv); + throw new StorageClientException( + "Column already exists, please provide explicit mapping indexing [" + + k + "] already used column [" + cv + "]"); + } + insertColumnsPst.clearParameters(); + insertColumnsPst.setString(1, cf[0]); + insertColumnsPst.setString(2, cf[1]); + insertColumnsPst.setString(3, cv); + insertColumnsPst.executeUpdate(); + cnames.put(k, cv); + usedColumns.add(cf[0]+":"+cv); + try { + statement.executeUpdate(MessageFormat.format(updateTable, cf[0], cv)); + LOGGER.info("Added Index Column OK {} Table:{} Column:{} ", + new Object[] { k, cf[0], cv }); + } catch (SQLException e) { + LOGGER.warn( + "Added Index Column Error {} Table:{} Column:{} Cause:{} ", + new Object[] { k, cf[0], cv, 
e.getMessage() }); + LOGGER.warn("SQL is {} ",MessageFormat.format(updateTable, cf[0], cv)); + throw new StorageClientException(e.getMessage(),e); + } + try { + statement.executeUpdate(MessageFormat.format(updateIndexes, cf[0], cv)); + LOGGER.info("Added Index Column OK {} Table:{} Column:{} ", + new Object[] { k, cf[0], cv }); + } catch (SQLException e) { + LOGGER.warn( + "Added Index Column Error {} Table:{} Column:{} Cause:{} ", + new Object[] { k, cf[0], cv, e.getMessage() }); + LOGGER.warn("SQL is {} ",MessageFormat.format(updateIndexes, cf[0], cv)); + throw new StorageClientException(e.getMessage(),e); + } + } + } + // sync done, now create a quick lookup table to extract the storage column for any column name, + Builder b = ImmutableMap.builder(); + for (Entry e : cnames.entrySet()) { + b.put(e.getKey(), e.getValue()); + LOGGER.info("Column Config {} maps to {} ",e.getKey(), e.getValue()); + } + + + + return b.build(); + } finally { + if ( rs != null ) { + try { + rs.close(); + } catch ( SQLException e ) { + LOGGER.debug(e.getMessage(),e); + } + } + if ( selectColumnsPst != null ) { + try { + selectColumnsPst.close(); + } catch ( SQLException e ) { + LOGGER.debug(e.getMessage(),e); + } + } + if ( insertColumnsPst != null ) { + try { + insertColumnsPst.close(); + } catch ( SQLException e ) { + LOGGER.debug(e.getMessage(),e); + } + } + if ( statement != null ) { + try { + statement.close(); + } catch ( SQLException e ) { + LOGGER.debug(e.getMessage(),e); + } + } + } + } + + private String makeNameSafeSQL(String name, boolean padding, int maxLength) { + if ( COLUMN_NAME_MAPPING.containsKey(name)) { + return COLUMN_NAME_MAPPING.get(name); + } + char[] c = name.toCharArray(); + char[] cout = new char[c.length]; + int e = 0; + int start = 0; + if ( c[0] == '_') { + if ( padding ) { + cout[e] = 'X'; + e++; + } + start = 1; + } + for(int i = start; i < c.length; i++) { + if ( !Character.isLetterOrDigit(c[i]) ) { + if ( !padding ) { + cout[e] = '_'; + e++; + } + } 
else { + cout[e] = c[i]; + e++; + } + } + return new String(cout,0,Math.min(e, maxLength)); + } + + public long getSlowQueryThreshold() { + return slowQueryThreshold; + } + + public long getVerySlowQueryThreshold() { + return verySlowQueryThreshold; + } + + public Indexer getIndexer() { + return indexer; + } + + public long allCount(String keySpace, String columnFamily) throws StorageClientException { + + String[] keys = new String[] { "list-all-count." + keySpace + "." + columnFamily, + "list-all-count." + columnFamily, "list-all-count" }; + String sql = null; + for (String statementKey : keys) { + sql = getSql(statementKey); + if (sql != null) { + break; + } + } + if ( sql == null ) { + throw new StorageClientException("Cant find sql statement for one of "+Arrays.toString(keys)); + } + PreparedStatement tpst = null; + ResultSet trs = null; + try { + LOGGER.debug("Preparing {} ", sql); + tpst = jdbcStorageClientConnection.getConnection().prepareStatement(sql); + inc("iterator"); + tpst.clearParameters(); + + long qtime = System.currentTimeMillis(); + trs = tpst.executeQuery(); + qtime = System.currentTimeMillis() - qtime; + if ( qtime > slowQueryThreshold && qtime < verySlowQueryThreshold) { + SQL_LOGGER.warn("Slow Query {}ms {} params:[{}]",new Object[]{qtime,sql}); + } else if ( qtime > verySlowQueryThreshold ) { + SQL_LOGGER.error("Very Slow Query {}ms {} params:[{}]",new Object[]{qtime,sql}); + } + inc("iterator r"); + LOGGER.debug("Executed "); + if ( trs.next() ) { + return trs.getLong(1); + } + return 0; + } catch (SQLException e) { + resetConnection(null); + LOGGER.error(e.getMessage(), e); + throw new StorageClientException(e.getMessage() + " SQL Statement was " + sql, + e); + } finally { + try { + if (trs != null) { + trs.close(); + dec("iterator r"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + try { + if (tpst != null) { + tpst.close(); + dec("iterator"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } 
+ } + } + + + public void setStorageClientListener(StorageClientListener storageClientListener) { + this.storageClientListener = storageClientListener; + } + + public Map getQueryCache() { + StorageCacheManager storageCacheManager = this.jdbcStorageClientConnection.getStorageCacheManager(); + if ( storageCacheManager != null ) { + return storageCacheManager.getCache("sparseQueryCache"); + } + return null; + } +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/KeyValueIndexer.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/KeyValueIndexer.java new file mode 100644 index 00000000..caab49fd --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/KeyValueIndexer.java @@ -0,0 +1,396 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.text.MessageFormat; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.apache.commons.lang.StringUtils; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageConstants; +import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.Disposer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; + +public abstract class KeyValueIndexer extends AbstractIndexer { + + private static final int STMT_BASE = 0; + private static final int STMT_TABLE_JOIN = 1; + private static final int STMT_WHERE = 2; + private static final int STMT_WHERE_SORT = 3; + private static final int STMT_ORDER = 4; + private static final int STMT_EXTRA_COLUMNS = 5; + + private static final Logger LOGGER = LoggerFactory.getLogger(KeyValueIndexer.class); + protected JDBCStorageClient client; + + public KeyValueIndexer(JDBCStorageClient jdbcStorageClient, Set indexColumns, Map sqlConfig) { + super(indexColumns); + this.client = jdbcStorageClient; + } + + public DisposableIterator> find(final String keySpace, final String columnFamily, + Map properties, final DirectCacheAccess cacheManager) throws StorageClientException { + String[] keys = null; + if ( properties != null && properties.containsKey(StorageConstants.CUSTOM_STATEMENT_SET)) { + 
String customStatement = (String) properties.get(StorageConstants.CUSTOM_STATEMENT_SET); + keys = new String[] { + customStatement+ "." + keySpace + "." + columnFamily, + customStatement + "." + columnFamily, + customStatement, + "block-find." + keySpace + "." + columnFamily, + "block-find." + columnFamily, + "block-find" + }; + } else { + keys = new String[] { "block-find." + keySpace + "." + columnFamily, + "block-find." + columnFamily, "block-find" }; + } + + final boolean rawResults = properties != null && properties.containsKey(StorageConstants.RAWRESULTS); + + String sql = client.getSql(keys); + if (sql == null) { + throw new StorageClientException("Failed to locate SQL statement for any of " + + Arrays.toString(keys)); + } + + String[] statementParts = StringUtils.split(sql, ';'); + + StringBuilder tables = new StringBuilder(); + StringBuilder where = new StringBuilder(); + StringBuilder order = new StringBuilder(); + StringBuilder extraColumns = new StringBuilder(); + + // collect information on paging + long page = 0; + long items = 25; + if (properties != null) { + if (properties.containsKey(StorageConstants.PAGE)) { + page = Long.valueOf(String.valueOf(properties.get(StorageConstants.PAGE))); + } + if (properties.containsKey(StorageConstants.ITEMS)) { + items = Long.valueOf(String.valueOf(properties.get(StorageConstants.ITEMS))); + } + } else { + properties = ImmutableMap.of(); + } + long offset = page * items; + + // collect information on sorting + String[] sorts = new String[] { null, "asc" }; + String _sortProp = (String) properties.get(StorageConstants.SORT); + if (_sortProp != null) { + String[] _sorts = StringUtils.split(_sortProp); + if (_sorts.length == 1) { + sorts[0] = _sorts[0]; + } else if (_sorts.length == 2) { + sorts[0] = _sorts[0]; + sorts[1] = _sorts[1]; + } + } + + List parameters = Lists.newArrayList(); + int set = 0; + for (Entry e : properties.entrySet()) { + Object v = e.getValue(); + String k = e.getKey(); + if ( 
shouldFind(keySpace, columnFamily, k) || (v instanceof Map)) { + if (v != null) { + // check for a value map and treat sub terms as for OR terms. + // Only go 1 level deep; don't recurse. That's just silly. + if (v instanceof Map) { + // start the OR grouping + where.append(" ("); + @SuppressWarnings("unchecked") + Set> subterms = ((Map) v).entrySet(); + for(Iterator> subtermsIter = subterms.iterator(); subtermsIter.hasNext();) { + Entry subterm = subtermsIter.next(); + String subk = subterm.getKey(); + Object subv = subterm.getValue(); + // check that each subterm should be indexed + if (shouldFind(keySpace, columnFamily, subk)) { + set = processEntry(statementParts, tables, where, order, extraColumns, parameters, subk, subv, sorts, set); + // as long as there are more add OR + if (subtermsIter.hasNext()) { + where.append(" OR"); + } + } + } + // end the OR grouping + where.append(") AND"); + } else { + // process a first level non-map value as an AND term + + if (v instanceof Iterable) { + for (Object vo : (Iterable)v) { + set = processEntry(statementParts, tables, where, order, extraColumns, parameters, k, vo, sorts, set); + where.append(" AND"); + } + } else { + set = processEntry(statementParts, tables, where, order, extraColumns, parameters, k, v, sorts, set); + where.append(" AND"); + } + } + } else if (!k.startsWith("_")) { + LOGGER.debug("Search on {}:{} filter dropped due to null value.", columnFamily, k); + } + } else { + if (!k.startsWith("_")) { + LOGGER.warn("Search on {}:{} is not supported, filter dropped ",columnFamily,k); + } + } + } + if (where.length() == 0) { + return new DisposableIterator>() { + + private Disposer disposer; + public boolean hasNext() { + return false; + } + + public Map next() { + return null; + } + + public void remove() { + } + + public void close() { + if ( disposer != null ) { + disposer.unregisterDisposable(this); + } + } + public void setDisposer(Disposer disposer) { + this.disposer = disposer; + } + + }; + } + + if 
(sorts[0] != null && order.length() == 0) { + if (shouldFind(keySpace, columnFamily, sorts[0])) { + String t = "a"+set; + if ( statementParts.length > STMT_EXTRA_COLUMNS ) { + extraColumns.append(MessageFormat.format(statementParts[STMT_EXTRA_COLUMNS], t)); + } + tables.append(MessageFormat.format(statementParts[STMT_TABLE_JOIN], t)); + parameters.add(sorts[0]); + where.append(MessageFormat.format(statementParts[STMT_WHERE_SORT], t)).append(" AND"); + order.append(MessageFormat.format(statementParts[STMT_ORDER], t, sorts[1])); + } else { + LOGGER.warn("Sort on {}:{} is not supported, sort dropped", columnFamily, + sorts[0]); + } + } + + + final String sqlStatement = MessageFormat.format(statementParts[STMT_BASE], + tables.toString(), where.toString(), order.toString(), items, offset, extraColumns.toString()); + + PreparedStatement tpst = null; + ResultSet trs = null; + try { + LOGGER.debug("Preparing {} ", sqlStatement); + tpst = client.getConnection().prepareStatement(sqlStatement); + client.inc("iterator"); + tpst.clearParameters(); + int i = 1; + for (Object params : parameters) { + tpst.setObject(i, params); + LOGGER.debug("Setting {} ", params); + + i++; + } + + long qtime = System.currentTimeMillis(); + trs = tpst.executeQuery(); + qtime = System.currentTimeMillis() - qtime; + if ( qtime > client.getSlowQueryThreshold() && qtime < client.getVerySlowQueryThreshold()) { + JDBCStorageClient.SQL_LOGGER.warn("Slow Query {}ms {} params:[{}]",new Object[]{qtime,sqlStatement,Arrays.toString(parameters.toArray(new String[parameters.size()]))}); + } else if ( qtime > client.getVerySlowQueryThreshold() ) { + JDBCStorageClient.SQL_LOGGER.error("Very Slow Query {}ms {} params:[{}]",new Object[]{qtime,sqlStatement,Arrays.toString(parameters.toArray(new String[parameters.size()]))}); + } + client.inc("iterator r"); + LOGGER.debug("Executed "); + + // pass control to the iterator. 
+ final PreparedStatement pst = tpst; + final ResultSet rs = trs; + final ResultSetMetaData rsmd = rs.getMetaData(); + tpst = null; + trs = null; + return client.registerDisposable(new PreemptiveIterator>() { + + private Map nextValue = Maps.newHashMap(); + private boolean open = true; + + @Override + protected Map internalNext() { + return nextValue; + } + + @Override + protected boolean internalHasNext() { + try { + if (open && rs.next()) { + if ( rawResults ) { + Builder b = ImmutableMap.builder(); + for (int i = 1; i <= rsmd.getColumnCount(); i++ ) { + b.put(String.valueOf(i), rs.getObject(i)); + } + nextValue = b.build(); + } else { + String id = rs.getString(1); + nextValue = client.internalGet(keySpace, columnFamily, id, cacheManager); + LOGGER.debug("Got Row ID {} {} ", id, nextValue); + } + return true; + } + close(); + nextValue = null; + LOGGER.debug("End of Set "); + return false; + } catch (SQLException e) { + LOGGER.error(e.getMessage(), e); + close(); + client.resetConnection(null); + nextValue = null; + return false; + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + close(); + nextValue = null; + return false; + } + } + + @Override + public void close() { + if (open) { + open = false; + try { + if (rs != null) { + rs.close(); + client.dec("iterator r"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + try { + if (pst != null) { + pst.close(); + client.dec("iterator"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + super.close(); + } + + } + }); + } catch (SQLException e) { + LOGGER.error(e.getMessage(), e); + throw new StorageClientException(e.getMessage() + " SQL Statement was " + sqlStatement, + e); + } finally { + // trs and tpst will only be non null if control has not been passed + // to the iterator. 
+ try { + if (trs != null) { + trs.close(); + client.dec("iterator r"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + try { + if (tpst != null) { + tpst.close(); + client.dec("iterator"); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + } + } + } + + + /** + * @param statementParts + * @param where + * @param params + * @param k + * @param v + * @param t + * @param conjunctionOr + */ + private int processEntry(String[] statementParts, StringBuilder tables, + StringBuilder where, StringBuilder order, StringBuilder extraColumns, List params, String k, Object v, + String[] sorts, int set) { + String t = "a" + set; + tables.append(MessageFormat.format(statementParts[STMT_TABLE_JOIN], t)); + + if (v instanceof Iterable) { + for (Iterator vi = ((Iterable) v).iterator(); vi.hasNext();) { + Object viObj = vi.next(); + + params.add(k); + params.add(viObj); + where.append(" (").append(MessageFormat.format(statementParts[STMT_WHERE], t)).append(")"); + + // as long as there are more add OR + if (vi.hasNext()) { + where.append(" OR"); + } + } + } else { + params.add(k); + params.add(v); + where.append(" (").append(MessageFormat.format(statementParts[STMT_WHERE], t)).append(")"); + } + + // add in sorting based on the table ref and value + if (k.equals(sorts[0])) { + order.append(MessageFormat.format(statementParts[STMT_ORDER], t, sorts[1])); + if ( statementParts.length > STMT_EXTRA_COLUMNS ) { + extraColumns.append(MessageFormat.format(statementParts[STMT_EXTRA_COLUMNS], t)); + } + } + return set+1; + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/NonBatchInsertIndexer.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/NonBatchInsertIndexer.java new file mode 100644 index 00000000..646683d5 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/NonBatchInsertIndexer.java @@ -0,0 +1,158 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * 
or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; + +public class NonBatchInsertIndexer extends KeyValueIndexer { + + private static final Logger LOGGER = LoggerFactory.getLogger(NonBatchInsertIndexer.class); + + public NonBatchInsertIndexer(JDBCStorageClient jdbcStorageClient, Set indexColumns, Map sqlConfig) { + super(jdbcStorageClient, indexColumns, sqlConfig); + } + + public void index( Map statementCache, String keySpace, String columnFamily, String key, String rid, Map values) throws StorageClientException, SQLException { + String rowId = client.getDebugRowId(keySpace, columnFamily, key); + for (Entry e : values.entrySet()) { + String k = e.getKey(); + Object o = e.getValue(); + if (shouldIndex(keySpace, columnFamily, k)) { + if (o 
instanceof RemoveProperty || o == null) { + PreparedStatement removeStringColumn = client.getStatement(keySpace, + columnFamily, JDBCStorageClient.SQL_REMOVE_STRING_COLUMN, rid, statementCache); + removeStringColumn.clearWarnings(); + removeStringColumn.clearParameters(); + removeStringColumn.setString(1, rid); + removeStringColumn.setString(2, k); + int nrows = removeStringColumn.executeUpdate(); + if (nrows == 0) { + Map m = client.get(keySpace, columnFamily, key); + LOGGER.debug( + "Column Not present did not remove {} {} Current Column:{} ", + new Object[] { rowId , k, m }); + } else { + LOGGER.debug("Removed Index {} {} {} ", + new Object[]{rowId, k, nrows}); + } + } else { + PreparedStatement removeStringColumn = client.getStatement(keySpace, + columnFamily, JDBCStorageClient.SQL_REMOVE_STRING_COLUMN, rid, statementCache); + removeStringColumn.clearWarnings(); + removeStringColumn.clearParameters(); + removeStringColumn.setString(1, rid); + removeStringColumn.setString(2, k); + int nrows = removeStringColumn.executeUpdate(); + if (nrows == 0) { + Map m = client.get(keySpace, columnFamily, key); + LOGGER.debug( + "Column Not present did not remove {} {} Current Column:{} ", + new Object[] { rowId, k, m }); + } else { + LOGGER.debug("Removed Index {} {} {} ", + new Object[]{rowId, k, nrows}); + } + Object[] os = (o instanceof Object[]) ? 
(Object[]) o : new Object[] { o }; + for (Object ov : os) { + String v = ov.toString(); + PreparedStatement insertStringColumn = client.getStatement(keySpace, + columnFamily, JDBCStorageClient.SQL_INSERT_STRING_COLUMN, rid, statementCache); + insertStringColumn.clearWarnings(); + insertStringColumn.clearParameters(); + insertStringColumn.setString(1, v); + insertStringColumn.setString(2, rid); + insertStringColumn.setString(3, k); + LOGGER.debug("Non Batch Insert Index {} {}", k, v); + if (insertStringColumn.executeUpdate() == 0) { + throw new StorageClientException("Failed to save " + + rowId + " column:[" + + k + "] "); + } else { + LOGGER.debug("Inserted Index {} {} [{}]", + new Object[] { rowId, + k, v }); + } + } + } + } + } + + if (!StorageClientUtils.isRoot(key)) { + String parent = StorageClientUtils.getParentObjectPath(key); + String hash = client.rowHash(keySpace, columnFamily, parent); + LOGGER.debug("Hash of {}:{}:{} is {} ", new Object[] { keySpace, columnFamily, + parent, hash }); + Map autoIndexMap = ImmutableMap.of( + Content.PARENT_HASH_FIELD, (Object) hash); + for (Entry e : autoIndexMap.entrySet()) { + String k = e.getKey(); + Object v = e.getValue(); + PreparedStatement removeStringColumn = client.getStatement(keySpace, columnFamily, + JDBCStorageClient.SQL_REMOVE_STRING_COLUMN, rid, statementCache); + removeStringColumn.clearWarnings(); + removeStringColumn.clearParameters(); + removeStringColumn.setString(1, rid); + removeStringColumn.setString(2, k); + int nrows = removeStringColumn.executeUpdate(); + if (nrows == 0) { + Map m = client.get(keySpace, columnFamily, key); + LOGGER.debug( + "Column Not present did not remove {} {} Current Column:{} ", + new Object[] { rowId, k, m }); + } else { + LOGGER.debug( + "Removed Index {} {} {} ", + new Object[] { rowId, k, nrows }); + } + + PreparedStatement insertStringColumn = client.getStatement(keySpace, columnFamily, + JDBCStorageClient.SQL_INSERT_STRING_COLUMN, rid, statementCache); + 
insertStringColumn.clearWarnings(); + insertStringColumn.clearParameters(); + insertStringColumn.setString(1, v.toString()); + insertStringColumn.setString(2, rid); + insertStringColumn.setString(3, k); + LOGGER.debug("Non Batch Insert Index {} {}", k, v); + if (insertStringColumn.executeUpdate() == 0) { + throw new StorageClientException("Failed to save " + + rowId + " column:[" + k + + "] "); + } else { + LOGGER.debug("Inserted Index {} {} [{}]", + new Object[] { rowId, k, v }); + } + } + } + + } + + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/PreemptiveCachedMapIterator.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/PreemptiveCachedMapIterator.java new file mode 100644 index 00000000..1a78597f --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/PreemptiveCachedMapIterator.java @@ -0,0 +1,194 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.List; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.Maps; + +public class PreemptiveCachedMapIterator extends PreemptiveIterator> { + + private static final Logger LOGGER = LoggerFactory.getLogger(PreemptiveCachedMapIterator.class); + private Map nextValue = Maps.newHashMap(); + private List> preloadedResults = null; + private boolean open = true; + private boolean started = false; + private int preloadedResultsIndex; + private ResultSet resultSet; + private 
boolean rawResults;
    private String keySpace;
    private String columnFamily;
    private DirectCacheAccess cachingManager;
    private JDBCStorageClient client;
    private PreparedStatement preparedStatement;
    private ResultSetMetaData resultSetMetadata;
    // Maximum number of rows eligible for the query cache; beyond this the
    // result is streamed from the live ResultSet and never cached.
    private static final int MAX_CACHED_ROWS = 500;

    /**
     * Construct an iterator from a query response.
     * @param client the storage client, used for row loads and counter accounting.
     * @param keySpace key space results belong to.
     * @param columnFamily column family results belong to.
     * @param resultSet the live JDBC result set (closed by {@link #close()}).
     * @param preparedStatement the owning statement (closed by {@link #close()}).
     * @param rawResults true to yield column-position maps instead of full rows.
     * @param cachingManager cache used when resolving row ids to full objects.
     * @throws SQLException if result set metadata cannot be read.
     */
    public PreemptiveCachedMapIterator(JDBCStorageClient client, String keySpace,
            String columnFamily, ResultSet resultSet, PreparedStatement preparedStatement,
            boolean rawResults, DirectCacheAccess cachingManager) throws SQLException {
        this.keySpace = keySpace;
        this.columnFamily = columnFamily;
        this.resultSet = resultSet;
        this.rawResults = rawResults;
        this.client = client;
        this.cachingManager = cachingManager;

        this.resultSetMetadata = resultSet.getMetaData();
        this.preparedStatement = preparedStatement;
    }

    /**
     * Construct an iterator from a cached response (as produced by
     * {@link #getResultsMap()}); no JDBC resources are held.
     * @param client the storage client, used for row loads.
     * @param keySpace key space results belong to.
     * @param columnFamily column family results belong to.
     * @param cachedResults cached map containing the row list under "rows".
     * @param rawResults true to yield column-position maps instead of full rows.
     * @param cachingManager cache used when resolving row ids to full objects.
     */
    @SuppressWarnings("unchecked")
    public PreemptiveCachedMapIterator(JDBCStorageClient client, String keySpace,
            String columnFamily, Map<String, Object> cachedResults, boolean rawResults,
            DirectCacheAccess cachingManager) {
        this.keySpace = keySpace;
        this.columnFamily = columnFamily;
        this.rawResults = rawResults;
        this.client = client;
        this.cachingManager = cachingManager;

        this.preloadedResults = (List<Map<String, Object>>) cachedResults.get("rows");
        this.preloadedResultsIndex = 0;
    }

    @Override
    protected Map<String, Object> internalNext() {
        started = true;
        return nextValue;
    }

    /**
     * Drain up to {@link #MAX_CACHED_ROWS} rows into a cacheable map. Returns
     * null (do not cache) when the result set holds more rows than the limit,
     * or on SQL failure. Must be called before iteration starts.
     */
    @Override
    public Map<String, Object> getResultsMap() {
        try {
            // load the first MAX_CACHED_ROWS results into a list, for caching.
            // If we still have more, then stop and return a null.
            if (preloadedResults == null && resultSet != null) {
                if (started) {
                    throw new IllegalStateException("Cant get results map once iteration has started");
                }
                com.google.common.collect.ImmutableList.Builder<Map<String, Object>> resultsBuilder =
                        ImmutableList.builder();
                int size = 0;
                while (size < MAX_CACHED_ROWS && resultSet.next()) {
                    Builder<String, Object> b = ImmutableMap.builder();
                    for (int i = 1; i <= resultSetMetadata.getColumnCount(); i++) {
                        b.put(String.valueOf(i), resultSet.getObject(i));
                    }
                    resultsBuilder.add(b.build());
                    size++;
                }
                preloadedResults = resultsBuilder.build();
                preloadedResultsIndex = 0;
                if (size >= MAX_CACHED_ROWS) {
                    // don't cache if there are more results than the limit.
                    return null;
                }
            }
            if (preloadedResults == null && resultSet == null) {
                throw new IllegalStateException("Cant get results map, no source results set.");
            }
            return ImmutableMap.of("rows", (Object) preloadedResults);
        } catch (SQLException e) {
            LOGGER.error(e.getMessage(), e);
            close();
            nextValue = null;
            return null;
        }
    }

    @Override
    protected boolean internalHasNext() {
        try {
            started = true;
            // Serve preloaded (cached or pre-drained) rows first.
            if (open && preloadedResults != null && preloadedResultsIndex < preloadedResults.size()) {
                if (rawResults) {
                    nextValue = preloadedResults.get(preloadedResultsIndex);
                } else {
                    String id = (String) preloadedResults.get(preloadedResultsIndex).get("1");
                    nextValue = client.internalGet(keySpace, columnFamily, id, cachingManager);
                }
                preloadedResultsIndex++;
                return true;
            }
            // Then stream any remaining rows from the live result set.
            if (open && resultSet != null && resultSet.next()) {
                if (rawResults) {
                    Builder<String, Object> b = ImmutableMap.builder();
                    for (int i = 1; i <= resultSetMetadata.getColumnCount(); i++) {
                        b.put(String.valueOf(i), resultSet.getObject(i));
                    }
                    nextValue = b.build();
                } else {
                    String id = resultSet.getString(1);
                    nextValue = client.internalGet(keySpace, columnFamily, id, cachingManager);
                    LOGGER.debug("Got Row ID {} {} ", id, nextValue);
                }
                return true;
            }
            close();
            nextValue = null;
            LOGGER.debug("End of Set ");
            return false;
        } catch (SQLException e) {
            LOGGER.error(e.getMessage(), e);
            close();
            nextValue = null;
            return false;
        } catch (StorageClientException e) {
            LOGGER.error(e.getMessage(), e);
            close();
            nextValue = null;
            return false;
        }
    }

    @Override
    public void close() {
        if (open) {
            open = false;
            try {
                if (resultSet != null) {
                    resultSet.close();
                    client.dec("iterator r");
                }
            } catch (SQLException e) {
                LOGGER.warn(e.getMessage(), e);
            }
            try {
                if (preparedStatement != null) {
                    preparedStatement.close();
                    client.dec("iterator");
                }
            } catch (SQLException e) {
                LOGGER.warn(e.getMessage(), e);
            }
            super.close();
        }

    }
}
package org.sakaiproject.nakamura.lite.storage.jdbc;

import java.io.IOException;
import java.io.InputStream;

import org.sakaiproject.nakamura.lite.storage.spi.Disposable;
import org.sakaiproject.nakamura.lite.storage.spi.Disposer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Disposable wrapper around an InputStream: closing it closes the stream
 * (logging, not propagating, IO failures) and unregisters from its Disposer.
 */
public class StreamDisposable implements Disposable {

    private static final Logger LOGGER = LoggerFactory.getLogger(StreamDisposable.class);
    private boolean open = true;
    private Disposer disposer = null;
    private InputStream in;

    public StreamDisposable(InputStream in) {
        this.in = in;
    }

    public void close() {
        if (open && in != null) {
            try {
                in.close();
            } catch (IOException e) {
                LOGGER.warn(e.getMessage(), e);
            }
            if (disposer != null) {
                disposer.unregisterDisposable(this);
            }
            open = false;

        }
    }

    public void setDisposer(Disposer disposer) {
        this.disposer = disposer;
    }
}
a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/WideColumnIndexer.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/WideColumnIndexer.java new file mode 100644 index 00000000..d258e0a5 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/WideColumnIndexer.java @@ -0,0 +1,689 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
 */
package org.sakaiproject.nakamura.lite.storage.jdbc;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.sakaiproject.nakamura.api.lite.CacheHolder;
import org.sakaiproject.nakamura.api.lite.RemoveProperty;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.StorageConstants;
import org.sakaiproject.nakamura.api.lite.content.Content;
import org.sakaiproject.nakamura.lite.storage.spi.CachableDisposableIterator;
import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess;
import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * Indexer that maintains a "wide" index table: one row per content item, one
 * real SQL column per indexed single-valued property, with multi-valued
 * (array) properties falling back to the narrow css (column-store) table.
 * Also implements query-result caching keyed on the query properties.
 */
public class WideColumnIndexer extends AbstractIndexer implements CachingIndexer {

    // Named SQL statements looked up from the client's SQL configuration.
    private static final String SQL_INSERT_WIDESTRING_ROW = "insert-widestring-row";
    private static final String SQL_UPDATE_WIDESTRING_ROW = "update-widestring-row";
    private static final String SQL_DELETE_WIDESTRING_ROW = "delete-widestring-row";
    private static final String SQL_EXISTS_WIDESTRING_ROW = "exists-widestring-row";
    // Indexes into the ';'-separated parts of the find SQL template
    // (see the long comment inside find() for what each part contains).
    private static final int SQL_QUERY_TEMPLATE_PART = 0;
    private static final int SQL_WHERE_PART = 1;
    private static final int SQL_WHERE_ARRAY_PART = 2;
    private static final int SQL_WHERE_ARRAY_WHERE_PART = 3;
    private static final int SQL_SORT_CLAUSE_PART = 4;
    private static final int SQL_SORT_LIST_PART = 5;

    private static final Logger LOGGER = LoggerFactory.getLogger(WideColumnIndexer.class);
    /**
     * Values to exclude from the query cache key.
     */
    private static final Set EXCLUDE_CACHE_KEYS = ImmutableSet.of(StorageConstants.CACHEABLE);

    /**
     * Set of keys in query properties that cant be cached, normally because invalidation is too hard.
     */
    private static final Set DONT_CACHE_KEYS = ImmutableSet.of(StorageConstants.ITEMS, StorageConstants.PAGE, StorageConstants.SORT);
    // Client through which all SQL is prepared/executed.
    private JDBCStorageClient client;
    // "columnFamily:propertyKey" -> SQL column name in the wide table.
    private Map indexColumnsNames;
    // "columnFamily:propertyKey" -> declared type string (e.g. "String", "int", "String[]").
    private Map indexColumnsTypes;
    // Counter of cachedFind() calls with no query cache configured, used to
    // rate-limit the "no query cache" log message.
    private int gets;

    /**
     * @param jdbcStorageClient the storage client used for SQL access.
     * @param indexColumnsNames map of columnFamily:key to SQL column name.
     * @param indexColumnTypes set of "columnFamily:key=Type" declarations;
     *        merged with the auto-index column types and parsed into
     *        {@link #indexColumnsTypes}.
     * @param sqlConfig SQL configuration (unused here; kept for signature
     *        compatibility with the factory that constructs indexers).
     */
    public WideColumnIndexer(JDBCStorageClient jdbcStorageClient,
            Map indexColumnsNames, Set indexColumnTypes, Map sqlConfig) {
        super(indexColumnsNames.keySet());
        this.client = jdbcStorageClient;
        this.indexColumnsNames = indexColumnsNames;
        Builder b = ImmutableMap.builder();
        for (String k : Sets.union(indexColumnTypes, JDBCStorageClient.AUTO_INDEX_COLUMNS_TYPES)) {
            // each entry is "name=type"; split into the two halves.
            String[] type = StringUtils.split(k,"=",2);
            b.put(type[0], type[1]);
        }
        this.indexColumnsTypes = b.build();
    }

    /**
     * Update the index for one content row: array-valued properties are
     * re-written into the narrow css table (delete then insert), while
     * single-valued properties are applied to the wide row via UPDATE,
     * falling back to INSERT when the row does not yet exist, or DELETE
     * when only removals remain.
     *
     * @param statementCache cache of prepared statements for this session.
     * @param rid the row hash identifying the indexed row.
     * @param values property key to value map; RemoveProperty/null/empty
     *        values mean "remove from index".
     */
    public void index(Map statementCache, String keySpace,
            String columnFamily, String key, String rid, Map values)
            throws StorageClientException, SQLException {
        ResultSet rs = null;

        try {
            // Partition the incoming values into removals and updates,
            // separately for array (css table) and scalar (wide row) columns.
            Set removeArrayColumns = Sets.newHashSet();
            Set removeColumns = Sets.newHashSet();
            Map updateArrayColumns = Maps.newHashMap();
            Map updateColumns = Maps.newHashMap();
            for (Entry e : values.entrySet()) {
                String k = e.getKey();
                Object o = e.getValue();
                Object[] valueMembers = (o instanceof Object[]) ? (Object[]) o : new Object[] { o };
                if (shouldIndex(keySpace, columnFamily, k)) {
                    if (isColumnArray(keySpace, columnFamily, k)) {
                        if (o instanceof RemoveProperty || o == null || valueMembers.length == 0) {
                            removeArrayColumns.add(k);
                        } else {
                            // updates are implemented as remove-then-insert,
                            // so the column is queued for removal too.
                            removeArrayColumns.add(k);
                            updateArrayColumns.put(k, valueMembers);
                        }
                    } else {
                        if (o instanceof RemoveProperty || o == null || valueMembers.length == 0) {
                            removeColumns.add(k);
                        } else {
                            // scalar column: only the first member is indexed.
                            updateColumns.put(k, valueMembers[0]);
                        }

                    }
                }
            }

            // Maintain the parent-hash column so child listings work, and
            // invalidate any cached "listchildren" query for that parent.
            if (!StorageClientUtils.isRoot(key)
                    && getColumnName(keySpace, columnFamily, Content.PARENT_HASH_FIELD) != null) {
                String parent = StorageClientUtils.getParentObjectPath(key);
                String hash = client.rowHash(keySpace, columnFamily, parent);
                LOGGER.debug("Hash of {}:{}:{} is {} ", new Object[] { keySpace, columnFamily,
                        parent, hash });
                updateColumns.put(Content.PARENT_HASH_FIELD, hash);
                invalidate(keySpace, columnFamily, ImmutableMap.of(Content.PARENT_HASH_FIELD, (Object) hash,
                        StorageConstants.CUSTOM_STATEMENT_SET, "listchildren", StorageConstants.CACHEABLE, true));
            }


            LOGGER.debug("Removing Array {} ",removeArrayColumns);
            LOGGER.debug("Updating Array {} ",updateArrayColumns);
            LOGGER.debug("Removing {} ",removeColumns);
            LOGGER.debug("Updating {} ",updateColumns);

            // arrays are stored in css, so we can re-use css sql.
            PreparedStatement removeStringColumn = client.getStatement(keySpace, columnFamily,
                    JDBCStorageClient.SQL_REMOVE_STRING_COLUMN, rid, statementCache);
            int nbatch = 0;
            for (String column : removeArrayColumns) {
                removeStringColumn.clearWarnings();
                removeStringColumn.clearParameters();
                removeStringColumn.setString(1, rid);
                removeStringColumn.setString(2, column);
                removeStringColumn.addBatch();
                LOGGER.debug("Removing {} {} ",rid,column);
                nbatch++;
            }
            if (nbatch > 0) {
                long t = System.currentTimeMillis();
                removeStringColumn.executeBatch();
                checkSlow(t, client.getSql(keySpace, columnFamily, JDBCStorageClient.SQL_REMOVE_STRING_COLUMN));
                nbatch = 0;
            }

            // add the column values in
            PreparedStatement insertStringColumn = client.getStatement(keySpace, columnFamily,
                    JDBCStorageClient.SQL_INSERT_STRING_COLUMN, rid, statementCache);
            for (Entry e : updateArrayColumns.entrySet()) {
                for (Object o : e.getValue()) {
                    insertStringColumn.clearWarnings();
                    insertStringColumn.clearParameters();
                    insertStringColumn.setString(1, o.toString());
                    insertStringColumn.setString(2, rid);
                    insertStringColumn.setString(3, e.getKey());
                    insertStringColumn.addBatch();
                    LOGGER.debug("Inserting {} {} {} ",new Object[]{o.toString(),rid,e.getKey()});
                    nbatch++;
                }
            }
            if (nbatch > 0) {
                long t = System.currentTimeMillis();
                insertStringColumn.executeBatch();
                checkSlow(t, client.getSql(keySpace, columnFamily, JDBCStorageClient.SQL_INSERT_STRING_COLUMN));
                nbatch = 0;
            }
            if (removeColumns.size() == 0 && updateColumns.size() == 0) {
                return; // nothing to add or remove, do nothing.
            }

            if (removeColumns.size() > 0 && updateColumns.size() == 0) {
                // exists, columns to remove, none to update, therefore
                // delete row this assumes that the starting point is a
                // complete map
                PreparedStatement deleteWideStringColumn = client.getStatement(keySpace,
                        columnFamily, SQL_DELETE_WIDESTRING_ROW, rid, statementCache);
                deleteWideStringColumn.clearParameters();
                deleteWideStringColumn.setString(1, rid);
                long t = System.currentTimeMillis();
                deleteWideStringColumn.execute();
                checkSlow(t, client.getSql(keySpace, columnFamily, SQL_DELETE_WIDESTRING_ROW));
                LOGGER.debug("Executed {} with {} ",deleteWideStringColumn, rid);
            } else if ( updateColumns.size() > 0 || removeColumns.size() > 0) {
                //
                // build an update query, record does not exists, but there
                // is stuff to add
                // sqlParts[0] is the UPDATE template, sqlParts[1] the
                // per-column "set" fragment template.
                String[] sqlParts = StringUtils.split(client.getSql(keySpace, columnFamily,
                        SQL_UPDATE_WIDESTRING_ROW),";");
                StringBuilder setOperations = new StringBuilder();
                for (Entry e : updateColumns.entrySet()) {
                    join(setOperations," ,").append(MessageFormat.format(sqlParts[1],
                            getColumnName(keySpace, columnFamily, e.getKey())));
                }
                for (String toRemove : removeColumns) {
                    // removed columns are set to NULL via the same fragment.
                    join(setOperations," ,").append(MessageFormat.format(sqlParts[1],
                            getColumnName(keySpace, columnFamily, toRemove)));
                }
                String finalSql = MessageFormat.format(sqlParts[0], setOperations);
                LOGGER.debug("Performing {} ",finalSql);
                PreparedStatement updateColumnPst = client.getStatement(finalSql,
                        statementCache);
                updateColumnPst.clearWarnings();
                updateColumnPst.clearParameters();
                int i = 1;
                for (Entry e : updateColumns.entrySet()) {
                    updateColumnPst.setString(i, e.getValue().toString());
                    LOGGER.debug(" Param {} {} ",i,e.getValue().toString());
                    i++;
                }
                for (String toRemove : removeColumns) {
                    updateColumnPst.setNull(i, toSqlType(columnFamily, toRemove));
                    LOGGER.debug(" Param {} NULL ",i);
                    i++;
                }
                LOGGER.debug(" Param {} {} ",i, rid);
                updateColumnPst.setString(i, rid);
                long t = System.currentTimeMillis();
                int n = updateColumnPst.executeUpdate();
                checkSlow(t, finalSql);
                if ( n == 0 ) {
                    // UPDATE touched no rows, so the wide row does not exist
                    // yet: fall back to INSERT.
                    // part 0 is the final ,part 1 is the template for column names,
                    // part 2 is the template for parameters.
                    // insert into x ( columnsnames ) values ()
                    StringBuilder columnNames = new StringBuilder();
                    StringBuilder paramHolders = new StringBuilder();
                    for (Entry e : updateColumns.entrySet()) {
                        columnNames.append(" ,").append(getColumnName(keySpace, columnFamily, e.getKey()));
                        paramHolders.append(" ,").append("?");
                    }
                    finalSql = MessageFormat.format(
                            client.getSql(keySpace, columnFamily, SQL_INSERT_WIDESTRING_ROW),
                            columnNames.toString(), paramHolders.toString());
                    LOGGER.debug("Insert SQL {} ",finalSql);
                    PreparedStatement insertColumnPst = client.getStatement(finalSql, statementCache);
                    insertColumnPst.clearWarnings();
                    insertColumnPst.clearParameters();
                    insertColumnPst.setString(1, rid);
                    LOGGER.debug(" Param 1 {} ",rid);
                    i = 2;
                    for (Entry e : updateColumns.entrySet()) {
                        LOGGER.debug(" Param {} {} ",i,e.getValue().toString());
                        insertColumnPst.setString(i, e.getValue().toString());
                        i++;
                    }
                    t = System.currentTimeMillis();
                    insertColumnPst.executeUpdate();
                    checkSlow(t, finalSql);
                }
            }
        } finally {
            if (rs != null) {
                rs.close();
            }
        }

    }

    /**
     * Log any statement that took more than 100ms; t is the start time in ms.
     */
    private void checkSlow(long t, String sql) {
        t = System.currentTimeMillis() - t;
        if ( t > 100 ) {
            JDBCStorageClient.SQL_LOGGER.info("Slow Query {} {} ",t, sql);
        }
    }

    /** @return the SQL column name for columnFamily:key, or null if not indexed wide. */
    private String getColumnName(String keySpace, String columnFamily, String key) {
        return indexColumnsNames.get(columnFamily + ":" + key);
    }

    /**
     * Map the declared index type of columnFamily:k to a {@link java.sql.Types}
     * constant, defaulting to VARCHAR for unknown or undeclared types.
     */
    private int toSqlType(String columnFamily, String k) {
        String type = indexColumnsTypes.get(columnFamily+":"+k);
        if ( type == null ) {
            return Types.VARCHAR;
        } else if (type.startsWith("String")) {
            return Types.VARCHAR;
        } else if (type.startsWith("int")) {
            return Types.INTEGER;
        } else if (type.startsWith("Date")) {
            return Types.DATE;
        }
        return Types.VARCHAR;
    }

    /** @return true if columnFamily:k is declared as an array type ("...[]"). */
    private boolean isColumnArray(String keySpace, String columnFamily, String k) {
        String type = indexColumnsTypes.get(columnFamily + ":" + k);
        if (type != null && type.endsWith("[]")) {
            return true;
        }
        return false;
    }

    /**
     * Execute a find against the wide index table, consulting and populating
     * the query cache. The SQL is assembled from a ';'-separated template
     * (see the inline comment below) with AND terms at the top level, nested
     * maps treated as OR groups (one level deep), and array columns matched
     * via subselects on the css table.
     */
    public DisposableIterator> find(final String keySpace, final String columnFamily,
            Map properties, final DirectCacheAccess cachingManager) throws StorageClientException {

        final boolean rawResults = properties != null && properties.containsKey(StorageConstants.RAWRESULTS);

        // serve from the query cache when possible.
        DisposableIterator> cachedIterator = cachedFind(keySpace, columnFamily, properties, rawResults, cachingManager);
        if ( cachedIterator != null ) {
            return cachedIterator;
        }

        // Resolve the SQL template, most specific key first.
        String[] keys = null;
        if ( properties != null && properties.containsKey(StorageConstants.CUSTOM_STATEMENT_SET)) {
            String customStatement = (String) properties.get(StorageConstants.CUSTOM_STATEMENT_SET);
            keys = new String[] {
                "wide-"+ customStatement+ "." + keySpace + "." + columnFamily,
                "wide-" + customStatement + "." + columnFamily,
                "wide-" + customStatement,
                "wide-block-find." + keySpace + "." + columnFamily,
                "wide-block-find." + columnFamily,
                "wide-block-find"
            };
        } else {
            keys = new String[] { "wide-block-find." + keySpace + "." + columnFamily,
                    "wide-block-find." + columnFamily, "wide-block-find" };
        }



        String sql = client.getSql(keys);
        if (sql == null) {
            throw new StorageClientException("Failed to locate SQL statement for any of "
                    + Arrays.toString(keys));
        }


        // collect information on paging
        long page = 0;
        long items = 25;
        String sortProp = null;
        if (properties != null) {
            if (properties.containsKey(StorageConstants.PAGE)) {
                page = Long.valueOf(String.valueOf(properties.get(StorageConstants.PAGE)));
            }
            if (properties.containsKey(StorageConstants.ITEMS)) {
                items = Long.valueOf(String.valueOf(properties.get(StorageConstants.ITEMS)));
            }
            sortProp = (String) properties.get(StorageConstants.SORT);
        }
        long offset = page * items;

        // collect information on sorting
        // sortingList holds alternating (column, direction) pairs.
        List sortingList = Lists.newArrayList();
        if (sortProp != null) {
            String[] sorts = StringUtils.split(sortProp);
            if (sorts.length == 1) {
                // array columns live in the css table and cannot be sorted on.
                if ( shouldIndex(keySpace, columnFamily, sorts[0]) && !isColumnArray(keySpace, columnFamily, sorts[0]) ) {
                    sortingList.add(getColumnName(keySpace, columnFamily, sorts[0]));
                    sortingList.add("asc");
                }
            } else if (sorts.length > 1) {
                for ( int i = 0; i < sorts.length; i+=2) {
                    // NOTE(review): sorts[0] here looks like it should be
                    // sorts[i] — as written every pair after the first is
                    // checked and resolved against the first sort field.
                    // Confirm intent before changing.
                    if ( shouldIndex(keySpace, columnFamily, sorts[0]) && !isColumnArray(keySpace, columnFamily, sorts[i]) ) {
                        sortingList.add(getColumnName(keySpace, columnFamily, sorts[0]));
                        sortingList.add(sorts[i+1]);
                    }
                }
            }
        }
        String[] sorts = sortingList.toArray(new String[sortingList.size()]);
        String[] statementParts = StringUtils.split(sql, ';');
        /*
         * Part 0 basic SQL template; {0} is the where clause {1} is the sort clause {2} is the from {3} is the to record
         * eg select rid from css where {0} {1} LIMIT {2} ROWS {3}
         * Part 1 where clause for non array matches; {0} is the columnName
         * eg {0} = ?
         * Part 2 where clause for array matches (not possible to sort on array matches) {0} is the table alias, {1} is the where clause
         * eg rid in ( select {0}.rid from css {0} where {1} )
         * Part 3 the where clause for array matches {0} is the table alias
         * eg {0}.cid = ? and {0}.v = ?
         * Part 3 sort clause {0} is the list to sort by
         * eg sort by {0}
         * Part 4 sort elements, {0} is the column, {1} is the order
         * eg {0} {1}
         * Dont include , AND or OR, the code will add those as appropriate.
         */

        StringBuilder whereClause = new StringBuilder();
        List parameters = Lists.newArrayList();
        int set = 0;
        for (Entry e : properties.entrySet()) {
            Object v = e.getValue();
            String k = e.getKey();
            if ( shouldFind(keySpace, columnFamily, k) || (v instanceof Map)) {
                if (v != null) {
                    // check for a value map and treat sub terms as for OR terms.
                    // Only go 1 level deep; don't recurse. That's just silly.
                    if (v instanceof Map) {
                        // start the OR grouping
                        @SuppressWarnings("unchecked")
                        Set> subterms = ((Map) v).entrySet();
                        StringBuilder subQuery = new StringBuilder();
                        for(Iterator> subtermsIter = subterms.iterator(); subtermsIter.hasNext();) {
                            Entry subterm = subtermsIter.next();
                            String subk = subterm.getKey();
                            Object subv = subterm.getValue();
                            // check that each subterm should be indexed
                            if (shouldFind(keySpace, columnFamily, subk)) {
                                set = processEntry(statementParts, keySpace, columnFamily, subQuery, parameters, subk, subv, sorts, set, " OR ");
                            }
                        }
                        if ( subQuery.length() > 0 ) {
                            join(whereClause," AND ").append("( ").append(subQuery.toString()).append(" ) ");
                        }
                    } else {
                        // process a first level non-map value as an AND term

                        if (v instanceof Iterable) {
                            for (Object vo : (Iterable)v) {
                                set = processEntry(statementParts, keySpace, columnFamily, whereClause, parameters, k, vo, sorts, set, " AND ");
                            }
                        } else {
                            set = processEntry(statementParts, keySpace, columnFamily, whereClause, parameters, k, v, sorts, set, " AND ");
                        }
                    }
                } else if (!k.startsWith("_")) {
                    LOGGER.debug("Search on {}:{} filter dropped due to null value.", columnFamily, k);
                }
            } else {
                if (!k.startsWith("_")) {
                    LOGGER.warn("Search on {}:{} is not supported, filter dropped ",columnFamily,k);
                }
            }
        }
        // there was no where clause generated
        // to avoid returneing everything, we wont return anything.
        if (whereClause.length() == 0) {
            return cacheResults(
                    keySpace,
                    columnFamily,
                    properties,
                    new PreemptiveCachedMapIterator(client, keySpace, columnFamily, ImmutableMap.of("rows",
                            (Object) ImmutableList.of()), rawResults, cachingManager));
        }

        // Assemble the ORDER BY clause from the (column, direction) pairs.
        StringBuilder sortClause = new StringBuilder();
        if ( statementParts.length > SQL_SORT_CLAUSE_PART ) {
            StringBuilder sortList = new StringBuilder();
            for ( int i = 0; i < sorts.length; i+= 2) {
                // NOTE(review): sorts[0] vs sorts[i] — same suspicion as above.
                if (shouldFind(keySpace, columnFamily, sorts[0])) {
                    join(sortList, ", ").append(MessageFormat.format(statementParts[SQL_SORT_LIST_PART], sorts[i], sorts[i+1]));
                }
            }
            if ( sortList.length() > 0 ) {
                sortClause.append(MessageFormat.format(statementParts[SQL_SORT_CLAUSE_PART], sortList.toString()));
            }
        }

        final String sqlStatement = MessageFormat.format(statementParts[SQL_QUERY_TEMPLATE_PART],
                whereClause.toString(), sortClause.toString(), items, offset);

        PreparedStatement tpst = null;
        ResultSet trs = null;
        try {

            LOGGER.debug("Preparing {} ", sqlStatement);
            tpst = client.getConnection().prepareStatement(sqlStatement);
            client.inc("iterator");
            tpst.clearParameters();
            int i = 1;
            for (Object params : parameters) {
                tpst.setObject(i, params);
                LOGGER.debug("Setting {} ", params);
                i++;
            }

            long qtime = System.currentTimeMillis();
            trs = tpst.executeQuery();
            qtime = System.currentTimeMillis() - qtime;
            if ( qtime > client.getSlowQueryThreshold() && qtime < client.getVerySlowQueryThreshold()) {
                JDBCStorageClient.SQL_LOGGER.warn("Slow Query {}ms {} params:[{}]",new Object[]{qtime,sqlStatement,Arrays.toString(parameters.toArray(new String[parameters.size()]))});
            } else if ( qtime > client.getVerySlowQueryThreshold() ) {
                JDBCStorageClient.SQL_LOGGER.error("Very Slow Query {}ms {} params:[{}]",new Object[]{qtime,sqlStatement,Arrays.toString(parameters.toArray(new String[parameters.size()]))});
            }
            client.inc("iterator r");
            LOGGER.debug("Executed ");

            // pass control to the iterator.
            // Nulling tpst/trs tells the finally block not to close them;
            // the returned iterator now owns and disposes of them.
            PreparedStatement pst = tpst;
            ResultSet rs = trs;
            tpst = null;
            trs = null;
            return client.registerDisposable(cacheResults(keySpace, columnFamily, properties, new PreemptiveCachedMapIterator(client, keySpace, columnFamily, rs, pst, rawResults, cachingManager)));
        } catch (SQLException e) {
            LOGGER.error(e.getMessage(), e);
            client.resetConnection(null);
            throw new StorageClientException(e.getMessage() + " SQL Statement was " + sqlStatement,
                    e);
        } finally {
            // trs and tpst will only be non null if control has not been passed
            // to the iterator.
            try {
                if (trs != null) {
                    trs.close();
                    client.dec("iterator r");
                }
            } catch (SQLException e) {
                LOGGER.warn(e.getMessage(), e);
            }
            try {
                if (tpst != null) {
                    tpst.close();
                    client.dec("iterator");
                }
            } catch (SQLException e) {
                LOGGER.warn(e.getMessage(), e);
            }
        }
    }





    /**
     * Append joinWord to sb only when sb already has content; returns sb so
     * calls can be chained when building comma/AND/OR separated lists.
     */
    private StringBuilder join(StringBuilder sb, String joinWord) {
        if ( sb.length() > 0 ) {
            sb.append(joinWord);
        }
        return sb;
    }

    /**
     * Append one query term for (key, value) to subQuery and its bind values
     * to params. Array-indexed keys generate a css subselect with a fresh
     * table alias ("a" + tableIndex); scalar keys generate a simple column
     * comparison. Iterable values expand to an OR group of comparisons.
     *
     * @param tableIndex next table-alias index; the (possibly incremented)
     *        value is returned for the caller to thread through.
     * @param logicalJoin " AND " or " OR ", used to join onto existing terms.
     * @return the updated tableIndex.
     */
    private int processEntry(String[] statementParts, String keySpace, String columnFamily, StringBuilder subQuery,
            List params, String key, Object value, String[] sorts, int tableIndex,
            String logicalJoin) {
        if ( isColumnArray(keySpace, columnFamily, key)) {
            String tableName = "a"+tableIndex;
            tableIndex++;
            if (value instanceof Iterable) {
                StringBuilder arraySubQuery = new StringBuilder();
                // SQL_WHERE_ARRAY_WHERE_PART is ( {0}cid = ? AND {0}v = ? )
                for (Iterator valueIterator = ((Iterable) value).iterator(); valueIterator.hasNext();) {
                    Object o = valueIterator.next();
                    params.add(key);
                    params.add(o);
                    join(arraySubQuery, " OR ").append(MessageFormat.format(statementParts[SQL_WHERE_ARRAY_WHERE_PART],tableName));
                }
                // SQL_WHERE_ARRAY_PART is rid in (select rid from css {0} where {1} )
                if ( arraySubQuery.length() > 0 ) {
                    join(subQuery, logicalJoin).append(MessageFormat.format(statementParts[SQL_WHERE_ARRAY_PART], tableName, arraySubQuery));
                }
            } else {
                params.add(key);
                params.add(value);
                String whereClause = MessageFormat.format(statementParts[SQL_WHERE_ARRAY_WHERE_PART],tableName);
                join(subQuery, logicalJoin).append(MessageFormat.format(statementParts[SQL_WHERE_ARRAY_PART], tableName, whereClause));
            }
        } else {
            String column = getColumnName(keySpace, columnFamily, key);
            if (value instanceof Iterable) {
                StringBuilder arraySubQuery = new StringBuilder();
                // SQL_WHERE_PART is {0} = ?
                for (Iterator valueIterator = ((Iterable) value).iterator(); valueIterator.hasNext();) {
                    Object o = valueIterator.next();
                    params.add(o);
                    join(arraySubQuery, " OR ").append(MessageFormat.format(statementParts[SQL_WHERE_PART], column));
                }
                if ( arraySubQuery.length() > 0 ) {
                    join(subQuery, logicalJoin).append(" ( ").append(arraySubQuery.toString()).append(" ) ");
                }
            } else {
                params.add(value);
                LOGGER.debug("Adding {} {} ",statementParts[SQL_WHERE_PART],column);
                join(subQuery, logicalJoin).append(MessageFormat.format(statementParts[SQL_WHERE_PART], column));
            }

        }
        return tableIndex;

    }


    // Query Caching -----------------------------------------------------------------------------------


    /**
     * Remove the cached result for the query described by queryProperties,
     * if a query cache is configured and the query is cacheable.
     */
    @Override
    public void invalidate(String keySpace, String columnFamily, Map queryProperties) {
        Map queryCache = client.getQueryCache();
        if ( queryCache == null ) {
            return;
        }
        String cacheKey = getCacheKey(keySpace, columnFamily, queryProperties);
        if ( cacheKey != null ) {
            LOGGER.debug("Removing Cached Query {} ",cacheKey);
            queryCache.remove(cacheKey);
        }
    }

    /**
     * Build a deterministic cache key from keySpace, columnFamily and the
     * sorted query properties. Returns null when the query is not marked
     * cacheable, contains a DONT_CACHE key, or has no keyable properties.
     */
    private String getCacheKey(String keySpace, String columnFamily,
            Map queryProperties) {
        if (Boolean.parseBoolean(String.valueOf(queryProperties.get(StorageConstants.CACHEABLE)))) {
            // sort the keys so the same query always yields the same key.
            List keys = Lists.newArrayList(queryProperties.keySet());
            Collections.sort(keys);
            StringBuilder sb = new StringBuilder();
            sb.append(keySpace).append(";").append(columnFamily);
            boolean hasKey = false;
            for ( String key : keys ) {
                if ( DONT_CACHE_KEYS.contains(key)) {
                    hasKey = false;
                    LOGGER.debug("Query cant be cached becuase it contains {} ",key);
                    break;
                }
                if ( !EXCLUDE_CACHE_KEYS.contains(key) ) {
                    sb.append(key).append(":").append(queryProperties.get(key)).append(";");
                    hasKey = true;
                }
            }
            if ( hasKey ) {
                // we might want to hash this to prevent the key getting massive.
                LOGGER.debug("Cached Query Key is {} ", sb.toString());
                return sb.toString();
            }
        }
        return null;
    }

    /**
     * Look the query up in the query cache; returns an iterator over the
     * cached result map, or null on a miss (or when no cache is configured,
     * which is logged once per 1000 calls).
     */
    private DisposableIterator> cachedFind(String keySpace,
            String columnFamily, Map queryProperties, boolean rawResults, DirectCacheAccess cachingManager) {
        Map queryCache = client.getQueryCache();
        if ( queryCache == null ) {
            gets++;
            if ( gets % 1000 == 0 ) {
                LOGGER.info("No Query Cache in use, please provide one.");
            }
            return null;
        }
        String cacheKey = getCacheKey(keySpace, columnFamily, queryProperties);
        if ( cacheKey != null ) {
            CacheHolder ch = queryCache.get(cacheKey);
            if ( ch != null ) {
                Map m = ch.get();
                if ( m != null ) {
                    return new PreemptiveCachedMapIterator(client, keySpace, columnFamily, m, rawResults, cachingManager);
                }
            }
        }
        LOGGER.debug("No Cached Query at {} ",cacheKey);
        return null;
    }



    /**
     * Store the iterator's result map in the query cache (when cacheable)
     * and return the iterator unchanged.
     */
    private DisposableIterator> cacheResults(String keySpace,
            String columnFamily, Map queryProperties,
            CachableDisposableIterator> disposableIterator) {
        Map queryCache = client.getQueryCache();
        if ( queryCache == null ) {
            return disposableIterator;
        }
        String cacheKey = getCacheKey(keySpace, columnFamily, queryProperties);
        if ( cacheKey != null ) {
            Map m = disposableIterator.getResultsMap();
            if ( m != null ) {
                queryCache.put(cacheKey, new CacheHolder(m));
            }
        }
        return disposableIterator;
    }




}
diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/CacheAwareMigrationManager.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/CacheAwareMigrationManager.java
new file mode 100644
index 00000000..81fe57fc
--- /dev/null
+++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/CacheAwareMigrationManager.java
@@ -0,0 +1,34 @@
package org.sakaiproject.nakamura.lite.storage.jdbc.migrate;

import java.util.Map;

import org.sakaiproject.nakamura.api.lite.CacheHolder;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.lite.CachingManagerImpl;
import org.sakaiproject.nakamura.lite.storage.spi.StorageClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Allows cached updates, keeping the supplied shared cache in step with the updates.
 * @author ieb
 *
 */
public class CacheAwareMigrationManager extends CachingManagerImpl {

    /**
     * @param client the underlying storage client writes are delegated to.
     * @param sharedCache the cache kept in step with each write.
     */
    public CacheAwareMigrationManager(StorageClient client, Map sharedCache) {
        super(client, sharedCache);
    }

    private static final Logger LOGGER = LoggerFactory.getLogger(CacheAwareMigrationManager.class);

    @Override
    protected Logger getLogger() {
        return LOGGER;
    }

    /**
     * Insert a row via the caching layer so the shared cache is updated
     * along with the store.
     */
    public void insert(String keySpace, String columnFamily, String key, Map encodedProperties, boolean probablyNew) throws StorageClientException {
        putCached(keySpace, columnFamily, key, encodedProperties, probablyNew);
    }

}
diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/DependencySequence.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/DependencySequence.java
new file mode 100644
index 00000000..57cea176
--- /dev/null
+++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/DependencySequence.java
@@ -0,0 +1,100 @@
package org.sakaiproject.nakamura.lite.storage.jdbc.migrate;

import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.sakaiproject.nakamura.api.lite.PropertyMigrator;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.collect.Sets.SetView;

/**
 * Orders a set of PropertyMigrators into a runnable sequence that honours
 * their declared dependencies, taking account of migrators already run.
 * Migrators marked run-once that have already run are dropped; if any
 * dependency cannot be satisfied the sequence is empty and the unresolved
 * migrators are reported via {@link #getUnresolved()}.
 */
public class DependencySequence implements Iterable {

    private static final Set EMPTY = ImmutableSet.of();
    // record of migrators that have already been run (name -> record).
    private Map runMigrators;
    // migrators available to run, keyed by name, insertion ordered.
    private Map availableMigrators;
    // resolved execution order.
    private Set toRunSequence;
    // migrators whose dependencies could not be satisfied; null when all resolved.
    private SetView unresolvedMigrators;

    /**
     * @param propertyMigrators the candidate migrators.
     * @param runMigratorRecord record of migrators already run, by name.
     */
    public DependencySequence(PropertyMigrator[] propertyMigrators, Map runMigratorRecord) {
        toRunSequence = Sets.newLinkedHashSet();
        Set toRunSequenceNames = Sets.newLinkedHashSet();
        availableMigrators = Maps.newLinkedHashMap();
        runMigrators = ImmutableMap.copyOf(runMigratorRecord);
        for (PropertyMigrator pm : propertyMigrators) {
            availableMigrators.put(pm.getName(), pm);
        }
        // Drop run-once migrators that have already been run.
        for (String satisfiedMigrator : runMigrators.keySet()) {
            if (availableMigrators.containsKey(satisfiedMigrator)) {
                if (Boolean.parseBoolean(availableMigrators.get(satisfiedMigrator).getOptions()
                        .get(PropertyMigrator.OPTION_RUNONCE))) {
                    availableMigrators.remove(satisfiedMigrator);
                }
            }
        }
        // Fixpoint loop: on each pass admit every migrator whose dependencies
        // are either already run or earlier in the sequence; stop when all are
        // placed, or when a pass admits nothing (dependency cycle / missing).
        for (;;) {
            int resolved = 0;
            for (Entry e : availableMigrators.entrySet()) {
                PropertyMigrator pm = e.getValue();
                if (!toRunSequence.contains(pm)) {
                    // migrator has not been run previously and is not in the
                    // list. Check dependencies
                    boolean satisfied = true;
                    for (String dep : pm.getDependencies()) {
                        if ( !availableMigrators.containsKey(dep) ) {
                            if ( !runMigratorRecord.containsKey(dep)) {
                                satisfied = false;
                                break;
                            }
                        } else if ( !toRunSequenceNames.contains(dep) ) {
                            satisfied = false;
                            break;
                        }
                    }
                    if (satisfied) {
                        toRunSequence.add(pm);
                        toRunSequenceNames.add(pm.getName());
                        resolved++;
                    }
                }
            }
            if (toRunSequence.size() == availableMigrators.size()) {
                // all satisfied
                unresolvedMigrators = null;
                break;
            }
            if (resolved == 0) {
                unresolvedMigrators = Sets.difference(
                        ImmutableSet.copyOf(availableMigrators.values()), toRunSequence);
                break;
            }
        }
    }

    /** @return true when one or more migrators could not be sequenced. */
    public boolean hasUnresolved() {
        return unresolvedMigrators != null && unresolvedMigrators.size() > 0;
    }

    /**
     * Iterate the migrators in dependency order; empty when any migrator
     * is unresolved (nothing runs rather than a partial sequence).
     */
    public Iterator iterator() {
        if (hasUnresolved()) {
            return EMPTY.iterator();
        }
        return toRunSequence.iterator();
    }

    /** @return the unresolved migrators, or an empty set when all resolved. */
    public Set getUnresolved() {
        if ( hasUnresolved() ) {
            return ImmutableSet.copyOf(unresolvedMigrators);
        }
        return ImmutableSet.of();
    }

    /** @return the record of migrators already run, as supplied at construction. */
    public Map getAlreadyRun() {
        return runMigrators;
    }
}
diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoLogger.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoLogger.java
new file mode 100644
index
00000000..958f8143
--- /dev/null
+++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoLogger.java
@@ -0,0 +1,133 @@
package org.sakaiproject.nakamura.lite.storage.jdbc.migrate;

import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;

import org.sakaiproject.nakamura.api.lite.Feedback;
import org.sakaiproject.nakamura.lite.storage.spi.StorageClientListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

/**
 * StorageClientListener that records before/after/delete events into rolling
 * redo log files via {@link LogFileRecord}. Events are buffered per
 * transaction in logMap and flushed as one record on commit() or rollback().
 * Not thread safe: logMap, dos and the DateFormat are shared mutable state —
 * NOTE(review): assumed single-threaded use by the migration driver; confirm.
 */
public class FileRedoLogger implements StorageClientListener {



    private static final Map EMPTY_MAP = ImmutableMap.of();
    private static final Logger LOGGER = LoggerFactory.getLogger(FileRedoLogger.class);
    // buffered events for the current transaction, keyed "cmd:keySpace:columnFamily:key".
    private Map> logMap = Maps.newLinkedHashMap();
    // directory (timestamp named) holding this run's log files.
    private File redoLocation;
    // the log file currently being written.
    private File currentFile;
    // open stream onto currentFile; null until first write, rotated on size.
    private DataOutputStream dos;
    // timestamp format used for both the directory and log file names.
    private DateFormat logFileNameFormat;
    // rotation threshold in bytes (compared against dos.size()).
    private int maxLogFileSize;
    // callback notified whenever a new log file is started.
    private Feedback feedback;

    /**
     * @param redoLogLocation parent directory for the redo log directory.
     * @param maxLogFileSize size in bytes after which the log file rolls.
     * @param feedback notified of each new log file.
     * @throws IllegalArgumentException if the log directory cannot be created.
     */
    public FileRedoLogger(String redoLogLocation, int maxLogFileSize, Feedback feedback) {
        this.maxLogFileSize = maxLogFileSize;
        logFileNameFormat = new SimpleDateFormat("yyyyMMddHHmmssZ");
        this.feedback = feedback;
        this.redoLocation = new File(redoLogLocation,logFileNameFormat.format(new Date()));
        if( !this.redoLocation.exists() ) {
            if (!this.redoLocation.mkdirs() ) {
                throw new IllegalArgumentException("Unable to create redo log at "+this.redoLocation.getPath());
            }
        }

    }


    /** Record a delete of the identified row (no property payload). */
    public void delete(String keySpace, String columnFamily, String key) {
        logMap.put(getKey(keySpace, columnFamily, key, "d"), EMPTY_MAP);
    }

    /** Record the row state after an update. */
    public void after(String keySpace, String columnFamily, String key, Map mapAfter) {
        logMap.put(getKey(keySpace, columnFamily, key, "a"), mapAfter);
    }


    /** Record the row state before an update. */
    public void before(String keySpace, String columnFamily, String key,
            Map mapBefore) {
        logMap.put(getKey(keySpace, columnFamily, key, "b"), mapBefore);
    }

    /** Flush the buffered events as a committed record and reset the buffer. */
    public void commit() {
        try {
            LogFileRecord.write(getCurrentRedoLogStream(),true, logMap);
        } catch (IOException e) {
            LOGGER.error(e.getMessage(),e);
        }
        logMap.clear();
    }


    /** Start a new transaction by discarding any buffered events. */
    public void begin() {
        logMap.clear();
    }

    /** Flush the buffered events as a rolled-back record and reset the buffer. */
    public void rollback() {
        try {
            LogFileRecord.write(getCurrentRedoLogStream(),false, logMap);
        } catch (IOException e) {
            LOGGER.error(e.getMessage(),e);
        }
        logMap.clear();
    }

    /** Build the buffer key: command (b/a/d) + row coordinates. */
    private String getKey(String keySpace, String columnFamily, String key, String command) {
        return command+":"+keySpace+":"+columnFamily+":"+key;
    }





    /**
     * Return the stream for the current log file, opening the first file on
     * demand and rolling to a new file once the current one exceeds
     * maxLogFileSize. Notifies feedback for every new file.
     */
    private DataOutputStream getCurrentRedoLogStream() throws IOException {
        if ( dos == null ) {
            currentFile = getNewLogFile();
            dos = new DataOutputStream(new FileOutputStream(currentFile));
            feedback.newLogFile(currentFile);
        } else if ( dos.size() > maxLogFileSize ) {
            dos.flush();
            dos.close();
            dos = null;
            currentFile = getNewLogFile();
            dos = new DataOutputStream(new FileOutputStream(currentFile));
            feedback.newLogFile(currentFile);
        }
        return dos;
    }

    /**
     * Pick a fresh timestamped log file name, suffixing -0, -1, ... until an
     * unused name is found.
     */
    private File getNewLogFile() {
        int i = 0;
        File f = new File(redoLocation, logFileNameFormat.format(new Date())+"-"+i+".log");
        while ( f.exists() ) {
            i++;
            f = new File(redoLocation, logFileNameFormat.format(new Date())+"-"+i+".log");
        }
        return f;
    }

    /** Flush and close the current log stream, if open. */
    public void close() throws IOException {
        if ( dos != null ) {
            dos.flush();
            dos.close();
            dos = null;
        }
    }


    /** @return the directory containing this run's redo log files. */
    public File getLocation() {
        return redoLocation;
    }


}
diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoReader.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoReader.java
new file mode 100644
index 00000000..28b448c1
--- /dev/null
+++
b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoReader.java
@@ -0,0 +1,58 @@
package org.sakaiproject.nakamura.lite.storage.jdbc.migrate;

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Comparator;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;

/**
 * Reads back the redo log files written by {@link FileRedoLogger}, logging
 * each record and whether it was committed. Reading stops per file at EOF.
 */
public class FileRedoReader {

    // FIX(review): was LoggerFactory.getLogger(FileRedoLogger.class) — a
    // copy/paste bug that attributed this class's logging to FileRedoLogger.
    private static final Logger LOGGER = LoggerFactory.getLogger(FileRedoReader.class);
    // directory containing the redo log files to analyse.
    private File location;

    /**
     * @param location the redo log directory produced by FileRedoLogger.
     */
    public FileRedoReader(File location) {
        this.location = location;
    }

    /**
     * Read every log file in the location, in absolute-path order, logging
     * each record's commit state and contents.
     *
     * @throws IOException on any read error other than end-of-file or a
     *         missing file (both of which are logged and skipped).
     */
    public void analyse() throws IOException {
        File[] files = location.listFiles();
        if (files == null) {
            // listFiles() returns null when location is not a directory.
            LOGGER.info("No log files found at {} ", location);
            return;
        }
        List<File> sortedFileList = Ordering.from(new Comparator<File>() {
            public int compare(File arg0, File arg1) {
                return arg0.getAbsolutePath().compareTo(arg1.getAbsolutePath());
            }
        }).sortedCopy(ImmutableList.copyOf(files));
        for (File f : sortedFileList) {
            // FIX(review): the stream was never closed (the read loop only
            // exits via exception); close it in a finally block.
            DataInputStream din = null;
            try {
                din = new DataInputStream(new FileInputStream(f));
                Map<String, Map<String, Object>> logMap = Maps.newLinkedHashMap();
                for (;;) {

                    if (LogFileRecord.read(din, logMap)) {
                        LOGGER.info("Committed {} ", logMap);
                    } else {
                        LOGGER.info("Not Committed {} ", logMap);
                    }
                }
            } catch (FileNotFoundException e1) {
                LOGGER.info("Log File Missing Reading {} ", f);
            } catch (EOFException e) {
                // normal termination for each file.
                LOGGER.info("Finished Reading {} ", f);
            } catch (IOException e) {
                LOGGER.info("Error Reading {} {} ", f, e.getMessage());
                throw e;
            } finally {
                if (din != null) {
                    try {
                        din.close();
                    } catch (IOException closeFailure) {
                        LOGGER.warn("Failed to close {} {} ", f, closeFailure.getMessage());
                    }
                }
            }
        }
    }

}
diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/LogFileRecord.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/LogFileRecord.java
new file mode 100644
index 00000000..efa5b325
--- /dev/null
+++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/LogFileRecord.java
@@ -0,0 +1,57 @@
package org.sakaiproject.nakamura.lite.storage.jdbc.migrate;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.commons.lang.StringUtils;
import org.sakaiproject.nakamura.lite.storage.spi.types.Types;

import com.google.common.collect.Maps;

/**
 * Binary serialisation of one redo-log transaction record: a start marker,
 * the commit flag, the event count, then for each event its key, its
 * property map and (redundantly, for tooling) its column family, followed
 * by an end marker.
 */
public class LogFileRecord {

    private static final String START_MARKER = "<";
    private static final String END_MARKER = ">";

    /**
     * Write one transaction record.
     *
     * @param committed true if the transaction committed, false if rolled back.
     * @param logMap event key ("cmd:keySpace:columnFamily:key") to property map.
     */
    public static void write(DataOutputStream dos, boolean committed,
            Map<String, Map<String, Object>> logMap) throws IOException {
        dos.writeUTF(START_MARKER);
        dos.writeBoolean(committed);
        dos.writeInt(logMap.size());
        for ( Entry<String, Map<String, Object>> e : logMap.entrySet()) {
            String k = e.getKey();
            dos.writeUTF(k);
            Types.writeMapToStream(e.getValue(), dos);
            // the column family (3rd segment of the key) is written out again
            // so readers can recover it without parsing the key.
            String[] parts = StringUtils.split(k,":",3);
            String columnFamily = parts[2];
            dos.writeUTF(columnFamily);
        }
        dos.writeUTF(END_MARKER);
    }

    /**
     * Read one transaction record into logMap (cleared first).
     *
     * @return the record's committed flag.
     * @throws IllegalStateException if the stream is not positioned at a
     *         record boundary.
     * @throws java.io.EOFException (from the underlying stream) at end of file.
     */
    public static boolean read(DataInputStream din, Map<String, Map<String, Object>> logMap) throws IOException {
        logMap.clear();
        if ( !START_MARKER.equals(din.readUTF())) {
            throw new IllegalStateException("Input Stream Not at start record marker ");
        }
        boolean committed = din.readBoolean();
        int size = din.readInt();
        for ( int i = 0; i < size; i++ ) {
            String k = din.readUTF();
            Map<String, Object> record = Maps.newHashMap();
            Types.readMapFromStream(record, din);
            // column family is stored but recoverable from the key; discard.
            @SuppressWarnings("unused")
            String columnFamily = din.readUTF();
            logMap.put(k, record);
        }
        if ( !END_MARKER.equals(din.readUTF()) ) {
            throw new IllegalStateException("Input Stream Not at end record marker after reading record");
        }
        return committed;

    }

}
diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/MigrateContentComponent.java
b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/MigrateContentComponent.java new file mode 100644 index 00000000..06015822 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/MigrateContentComponent.java @@ -0,0 +1,302 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc.migrate; + +import java.io.IOException; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.Feedback; +import org.sakaiproject.nakamura.api.lite.MigrateContentService; +import org.sakaiproject.nakamura.api.lite.PropertyMigrationException; +import org.sakaiproject.nakamura.api.lite.PropertyMigrator; +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.lite.SessionImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; +import org.sakaiproject.nakamura.lite.storage.jdbc.Indexer; +import org.sakaiproject.nakamura.lite.storage.jdbc.JDBCStorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.SparseRow; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import 
org.sakaiproject.nakamura.lite.storage.spi.content.BlockSetContentHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.Maps; + +/** + * This component performs migration for JDBC only. It goes direct to the JDBC + * tables to get a lazy iterator of rowIDs direct from the StorageClient which + * it then updates one by one. In general this approach to migration is only + * suitable for the JDBC drivers since they are capable of producing a non in + * memory list of rowids, a migrator that targets the ColumDBs should probably + * use a MapReduce job to perform migration and avoid streaming all data through + * a single node over the network. + * + * At present, the migrator does not record if an item has been migrated. Which + * means if a migration operation is stopped it will have to be restarted from + * the beginning and records that have already been migrated will get + * re-processed. To put a restart facility in place care will need to taken to + * ensure that updates to existing rows and new rows are tracked as well as the + * rows that have already been processed. In addition a performant way of + * querying all objects to get a dense list of items to be migrated. 
Its not + * impossible but needs some careful thought to make it work on realistic + * datasets (think 100M records+, don't think 10K records) + * + * @author ieb + * + */ +@Component(immediate = true, enabled = true, metatype = true) +@Service(value = MigrateContentService.class) +public class MigrateContentComponent implements MigrateContentService { + + private static final String SYSTEM_MIGRATION_CONTENT_ITEM = "system/migration"; + + private static final String DEFAULT_REDOLOG_LOCATION = "migrationlogs"; + + @Property(value=DEFAULT_REDOLOG_LOCATION) + private static final String PROP_REDOLOG_LOCATION = "redolog-location"; + + private static final int DEFAULT_MAX_LOG_SIZE = 1024000; + + @Property(intValue=DEFAULT_MAX_LOG_SIZE) + private static final String PROP_MAX_LOG_SIZE = "max-redo-log-size"; + + + public interface IdExtractor { + + String getKey(Map properties); + + } + + private static final Logger LOGGER = LoggerFactory.getLogger(MigrateContentComponent.class); + + @Reference + private Repository repository; + + @Reference + private Configuration configuration; + + @Reference + private PropertyMigratorTracker propertyMigratorTracker; + + private String redoLogLocation; + + private Integer maxLogFileSize; + + + + + @Activate + public synchronized void activate(Map properties) throws StorageClientException, + AccessDeniedException, IOException { + redoLogLocation = StorageClientUtils.getSetting(properties.get(PROP_REDOLOG_LOCATION), DEFAULT_REDOLOG_LOCATION); + maxLogFileSize = StorageClientUtils.getSetting(properties.get(PROP_MAX_LOG_SIZE), DEFAULT_MAX_LOG_SIZE); + } + + + public synchronized void migrate(boolean dryRun, int limit, boolean reindexAll, Feedback feedback ) throws ClientPoolException, StorageClientException, AccessDeniedException, IOException, PropertyMigrationException { + SessionImpl session = (SessionImpl) repository.loginAdministrative(); + StorageClient client = session.getClient(); + FileRedoLogger migrateRedoLog = new 
FileRedoLogger(redoLogLocation, maxLogFileSize, feedback); + client.setStorageClientListener(migrateRedoLog); + try{ + if (client instanceof JDBCStorageClient) { + JDBCStorageClient jdbcClient = (JDBCStorageClient) client; + String keySpace = configuration.getKeySpace(); + + Indexer indexer = jdbcClient.getIndexer(); + + PropertyMigrator[] propertyMigrators = propertyMigratorTracker.getPropertyMigrators(); + + + DependencySequence migratorDependencySequence = getMigratorSequence(session, propertyMigrators); + + for (PropertyMigrator p : migratorDependencySequence) { + LOGGER.info("DryRun:{} Using Property Migrator {} ", dryRun, p); + feedback.log("DryRun:{0} Using Property Migrator {1} ", dryRun, p); + + } + for (PropertyMigrator p : migratorDependencySequence.getUnresolved()) { + LOGGER.info("DryRun:{} Unresolved Property Migrator {} ", dryRun, p); + feedback.log("DryRun:{0} Unresolved Property Migrator {1} ", dryRun, p); + } + for (Entry p : migratorDependencySequence.getAlreadyRun().entrySet()) { + LOGGER.info("DryRun:{} Migrator Last Run {} ", dryRun, p); + feedback.log("DryRun:{0} Migrator Last Run {1} ", dryRun, p); + } + if ( migratorDependencySequence.hasUnresolved() ) { + throw new PropertyMigrationException("There are unresolved dependencies "+migratorDependencySequence.getUnresolved()); + } + CacheAwareMigrationManager cacheAwareMigrationManager = new CacheAwareMigrationManager(jdbcClient, session.getCache(configuration.getAuthorizableColumnFamily())); + reindex(dryRun, jdbcClient, cacheAwareMigrationManager, keySpace, configuration.getAuthorizableColumnFamily(), + indexer, migratorDependencySequence, new IdExtractor() { + + public String getKey(Map properties) { + if (properties.containsKey(Authorizable.ID_FIELD)) { + return (String) properties.get(Authorizable.ID_FIELD); + } + return null; + } + }, limit, feedback, reindexAll); + + cacheAwareMigrationManager = new CacheAwareMigrationManager(jdbcClient, 
session.getCache(configuration.getContentColumnFamily())); + reindex(dryRun, jdbcClient, cacheAwareMigrationManager, keySpace, configuration.getContentColumnFamily(), indexer, + migratorDependencySequence, new IdExtractor() { + + public String getKey(Map properties) { + if (properties.containsKey(BlockSetContentHelper.CONTENT_BLOCK_ID)) { + // blocks of a bit stream + return (String) properties + .get(BlockSetContentHelper.CONTENT_BLOCK_ID); + } else if (properties.containsKey(Content.UUID_FIELD)) { + // a content item and content block item + return (String) properties.get(Content.UUID_FIELD); + } else if (properties.containsKey(Content.STRUCTURE_UUID_FIELD)) { + // a structure item + return (String) properties.get(Content.PATH_FIELD); + } + return null; + } + }, limit, feedback, reindexAll); + + cacheAwareMigrationManager = new CacheAwareMigrationManager(jdbcClient, session.getCache(configuration.getAclColumnFamily())); + reindex(dryRun, jdbcClient, cacheAwareMigrationManager, keySpace, configuration.getAclColumnFamily(), indexer, + migratorDependencySequence, new IdExtractor() { + public String getKey(Map properties) { + if (properties.containsKey(AccessControlManagerImpl._KEY)) { + return (String) properties.get(AccessControlManagerImpl._KEY); + } + return null; + } + }, limit, feedback, reindexAll); + + saveMigratorSequence(session, migratorDependencySequence); + + } else { + LOGGER.warn("This class will only re-index content for the JDBCStorageClients"); + } + } finally { + client.setStorageClientListener(null); + migrateRedoLog.close(); + session.logout(); + } + + } + + + private void saveMigratorSequence(SessionImpl session, + DependencySequence migratorDependencySequence) throws AccessDeniedException, + StorageClientException { + Content runMigrators = session.getContentManager().get(SYSTEM_MIGRATION_CONTENT_ITEM); + String ts = String.valueOf(System.currentTimeMillis()); + int i = 0; + if (runMigrators == null) { + Builder b = ImmutableMap.builder(); + 
for (PropertyMigrator pm : migratorDependencySequence) { + b.put(pm.getName(), ts + ";" + i); + } + runMigrators = new Content(SYSTEM_MIGRATION_CONTENT_ITEM, b.build()); + } else { + for (PropertyMigrator pm : migratorDependencySequence) { + runMigrators.setProperty(pm.getName(), ts + ";" + i); + } + } + session.getContentManager().update(runMigrators); + } + + private DependencySequence getMigratorSequence(SessionImpl session, + PropertyMigrator[] propertyMigrators) throws StorageClientException, + AccessDeniedException { + Content runMigrators = session.getContentManager().get(SYSTEM_MIGRATION_CONTENT_ITEM); + Map runMigratorRecord = ImmutableMap.of(); + if (runMigrators != null) { + runMigratorRecord = runMigrators.getProperties(); + } + return new DependencySequence(propertyMigrators, runMigratorRecord); + } + + + + private void reindex(boolean dryRun, JDBCStorageClient jdbcClient, CacheAwareMigrationManager migrationManager, String keySpace, + String columnFamily, Indexer indexer, DependencySequence propertyMigrators, + IdExtractor idExtractor, int limit, Feedback feedback, boolean reindexAll) throws StorageClientException { + long objectCount = jdbcClient.allCount(keySpace, columnFamily); + LOGGER.info("DryRun:{} Migrating {} objects in {} ", new Object[] { dryRun, objectCount, + columnFamily }); + feedback.log("DryRun:{0} Migrating {1} objects in {2} ", new Object[] { dryRun, objectCount, + columnFamily }); + if (objectCount > 0) { + DisposableIterator allObjects = jdbcClient.listAll(keySpace, columnFamily); + try { + long c = 0; + while (allObjects.hasNext()) { + Map statementCache = Maps.newHashMap(); + SparseRow r = allObjects.next(); + c++; + if (c % 1000 == 0) { + LOGGER.info("DryRun:{} {}% remaining {} ", new Object[] { dryRun, + ((c * 100) / objectCount), objectCount - c }); + feedback.progress(dryRun, c, objectCount); + + } + try { + Map properties = r.getProperties(); + String rid = r.getRowId(); + boolean save = false; + for (PropertyMigrator 
propertyMigrator : propertyMigrators) { + save = propertyMigrator.migrate(rid, properties) || save; + } + String key = idExtractor.getKey(properties); + if (key != null) { + if (!dryRun) { + if (save) { + migrationManager.insert(keySpace, columnFamily, key, properties, + false); + } else if ( reindexAll ) { + indexer.index(statementCache, keySpace, columnFamily, key, rid, + properties); + } + } else { + if (c > limit) { + LOGGER.info("Dry Run Migration Stoped at {} Objects ", limit); + feedback.log("Dry Run Migration Stoped at {0} Objects ", limit); + break; + } + } + } else { + LOGGER.info("DryRun:{} Skipped Reindexing, no key in {}", dryRun, + properties); + feedback.log("DryRun:{0} Skipped Reindexing, no key in {1}", dryRun, + properties); + } + } catch (SQLException e) { + LOGGER.warn(e.getMessage(), e); + feedback.exception(e); + } catch (StorageClientException e) { + LOGGER.warn(e.getMessage(), e); + feedback.exception(e); + } finally { + jdbcClient.closeStatementCache(statementCache); + } + } + } finally { + allObjects.close(); + } + } + } +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/PropertyMigratorTracker.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/PropertyMigratorTracker.java new file mode 100644 index 00000000..e11a817e --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/PropertyMigratorTracker.java @@ -0,0 +1,17 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc.migrate; + +import org.sakaiproject.nakamura.api.lite.PropertyMigrator; + +/** + * An internal API so that the MigrateContentComponent can exist disabled, but + * when it starts bind to the PropertyMigratorTrackerService which is enabled + * and will hold all the PropertyMigrators in the system. 
+ * + * @author ieb + * + */ +public interface PropertyMigratorTracker { + + PropertyMigrator[] getPropertyMigrators(); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/PropertyMigratorTrackerService.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/PropertyMigratorTrackerService.java new file mode 100644 index 00000000..86e82af7 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/PropertyMigratorTrackerService.java @@ -0,0 +1,53 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc.migrate; + +import java.util.Set; + +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.ReferenceCardinality; +import org.apache.felix.scr.annotations.ReferencePolicy; +import org.apache.felix.scr.annotations.ReferenceStrategy; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.PropertyMigrator; + +import com.google.common.collect.Sets; + +/** + * The PropertyMigratorTracker service tracks unique PropertyMigrators + * efficiently and stores them should an operator want to activate the + * MigrateContentComponent and perform a migration. The reason this Component is + * here is so that its live in the system as early as possible and it can track + * any PropertyMigrators that have been provided by other bundles. If it were + * not active it would not be able to track, and there is a danger, depending on + * which OSGi container is being used, that some PropertyMigrators might not get + * registered. 
+ * + * @author ieb + * + */ +@Component(immediate = true, metatype = true) +@Service(value = PropertyMigratorTracker.class) +@Reference(cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, name = "propertyMigrator", referenceInterface = PropertyMigrator.class, policy = ReferencePolicy.DYNAMIC, strategy = ReferenceStrategy.EVENT, bind = "bind", unbind = "unbind") +public class PropertyMigratorTrackerService implements PropertyMigratorTracker { + + private Set propertyMigrators = Sets.newHashSet(); + + public PropertyMigrator[] getPropertyMigrators() { + synchronized (propertyMigrators) { + return propertyMigrators.toArray(new PropertyMigrator[propertyMigrators.size()]); + } + } + + public void bind(PropertyMigrator pm) { + synchronized (propertyMigrators) { + propertyMigrators.add(pm); + } + } + + public void unbind(PropertyMigrator pm) { + synchronized (propertyMigrators) { + propertyMigrators.remove(pm); + } + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClient.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClient.java new file mode 100644 index 00000000..d43c1a83 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClient.java @@ -0,0 +1,340 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.mem; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterators; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.SparseMapRow; +import org.sakaiproject.nakamura.lite.storage.spi.SparseRow; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientListener; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockSetContentHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +public class MemoryStorageClient implements StorageClient { + + private static final Logger LOGGER = LoggerFactory.getLogger(MemoryStorageClient.class); + private static final Set INDEX_COLUMNS = ImmutableSet.of( + "au:rep:principalName", + "au:type" + ); + + private static final Set AUTO_INDEX_COLUMNS = 
ImmutableSet.of( + "cn:_:parenthash", + "au:_:parenthash", + "ac:_:parenthash"); + + Map store; + private int blockSize; + private int maxChunksPerBlockSet; + private BlockContentHelper contentHelper; + private MemoryStorageClientPool pool; + + public MemoryStorageClient(MemoryStorageClientPool pool, + Map store, Map properties) { + this.store = store; + this.pool = pool; + contentHelper = new BlockSetContentHelper(this); + blockSize = StorageClientUtils.getSetting( + properties.get(BlockSetContentHelper.CONFIG_BLOCK_SIZE), + BlockSetContentHelper.DEFAULT_BLOCK_SIZE); + maxChunksPerBlockSet = StorageClientUtils.getSetting( + properties.get(BlockSetContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK), + BlockSetContentHelper.DEFAULT_MAX_CHUNKS_PER_BLOCK); + + } + + public void close() { + pool.releaseClient(this); + } + + public void destroy() { + } + + public Map get(String keySpace, String columnFamily, String key) + throws StorageClientException { + return (Map) getOrCreateRow(keySpace, columnFamily, key); + } + + private Map getOrCreateRow(String keySpace, String columnFamily, String key) { + String keyName = rowHash(keySpace, columnFamily, key); + + if (!store.containsKey(keyName)) { + Map row = Maps.newConcurrentMap(); + store.put(keyName, row); + LOGGER.debug("Created {} as {} ", new Object[] { keyName, row }); + return row; + } + @SuppressWarnings("unchecked") + Map row = (Map) store.get(keyName); + LOGGER.debug("Got {} as {} ", new Object[] { keyName, row }); + return row; + } + + public String rowHash(String keySpace, String columnFamily, String key) { + return keySpace + ":" + columnFamily + ":" + key; + } + + public String keyHash(String keySpace, String columnFamily, String columnKey, Object columnValue) { + return "_"+keySpace + ":" + columnFamily + ":" + columnKey + ":"+columnValue; + } + + public void insert(String keySpace, String columnFamily, String key, Map values, boolean probablyNew) + throws StorageClientException { + Map row = get(keySpace, 
columnFamily, key); + + for (Entry e : values.entrySet()) { + Object value = e.getValue(); + if (value instanceof byte[]) { + byte[] bvalue = (byte[]) e.getValue(); + byte[] nvalue = new byte[bvalue.length]; + System.arraycopy(bvalue, 0, nvalue, 0, bvalue.length); + value = nvalue; + } + if (value == null || value instanceof RemoveProperty) { + Object previous = row.remove(e.getKey()); + removeIndex(keySpace, columnFamily, key, e.getKey(), previous); + } else { + Object previous = row.put(e.getKey(), value); + removeIndex(keySpace, columnFamily, key, e.getKey(), previous); + addIndex(keySpace, columnFamily, key, e.getKey(), e.getValue()); + } + } + LOGGER.debug("Updated {} {} ", key, row); + } + + + private void addIndex(String keySpace, String columnFamily, String key, String columnKey, + Object value) { + if ( INDEX_COLUMNS.contains(columnFamily+":"+columnKey)) { + addIndexValue(keySpace, columnFamily, key, columnKey, value); + } + if ( !StorageClientUtils.isRoot(key) ) { + addIndexValue(keySpace, columnFamily, key, Content.PARENT_HASH_FIELD, (rowHash(keySpace, columnFamily,StorageClientUtils.getParentObjectPath(key)))); + } + } + + + private void addIndexValue(String keySpace, String columnFamily, String key, String columnKey, + Object columnValue) { + String indexKey = keyHash(keySpace,columnFamily, columnKey, columnValue); + @SuppressWarnings("unchecked") + Set index = (Set) store.get(indexKey); + if ( index == null ) { + index = Sets.newSetFromMap(new ConcurrentHashMap()); + store.put(indexKey, index); + } + index.add(rowHash(keySpace,columnFamily, key)); + } + + private void removeIndex(String keySpace, String columnFamily, String key, String columnKey, Object columnValue) { + @SuppressWarnings("unchecked") + Set index = (Set) store.get(keyHash(keySpace,columnFamily, columnKey, columnValue)); + if ( index != null ) { + index.remove(rowHash(keySpace, columnFamily, key)); + } + } + + public void remove(String keySpace, String columnFamily, String key) + throws 
StorageClientException { + String keyName = rowHash(keySpace, columnFamily, key); + if (store.containsKey(keyName)) { + @SuppressWarnings("unchecked") + Map previous = (Map) store.remove(keyName); + for( Entry e : previous.entrySet() ) { + removeIndex(keySpace, columnFamily, key, e.getKey(), e.getValue()); + } + } + } + + public Map streamBodyIn(String keySpace, String contentColumnFamily, + String contentId, String contentBlockId, String streamId, Map content, InputStream in) + throws StorageClientException, AccessDeniedException, IOException { + return contentHelper.writeBody(keySpace, contentColumnFamily, contentId, contentBlockId, streamId, + blockSize, maxChunksPerBlockSet, in); + } + + public InputStream streamBodyOut(String keySpace, String contentColumnFamily, String contentId, + String contentBlockId, String streamId, Map content) throws StorageClientException, + AccessDeniedException { + + int nBlocks = toInt(content.get(Content.NBLOCKS_FIELD)); + return contentHelper.readBody(keySpace, contentColumnFamily, contentBlockId, streamId, nBlocks); + } + + public boolean hasBody(Map content, String streamId) { + return contentHelper.hasBody(content, streamId); + } + + private int toInt(Object object) { + if ( object instanceof Integer) { + return ((Integer) object).intValue(); + } + return 0; + } + + public DisposableIterator> find(String keySpace, + String columnFamily, Map properties, DirectCacheAccess cachingManager) { + List> matchingSets = Lists.newArrayList(); + for (Entry e : properties.entrySet()) { + Object v = e.getValue(); + String k = e.getKey(); + if ( shouldIndex(keySpace, columnFamily, k) ) { + if (v != null) { + @SuppressWarnings("unchecked") + Set matches = (Set) store.get(keyHash(keySpace, columnFamily, e.getKey(), e.getValue())); + LOGGER.debug("Searching for {} found {} ",keyHash(keySpace, columnFamily, e.getKey(), e.getValue()), matches); + if ( matches != null) { + matchingSets.add(matches); + } + } + } else { + LOGGER.warn("Search on 
{}:{} is not supported, filter dropped ",columnFamily,k); + } + } + + // find the union of all matching sets, using set views to build a tree of sets. This will be lazy iterating. + Set setOfRowHashes = null; + for ( Set m : matchingSets) { + if ( setOfRowHashes == null ) { + setOfRowHashes = m; + } else { + setOfRowHashes = Sets.intersection(setOfRowHashes, m); + } + } + LOGGER.debug("Matching Rowhashes is {} ", setOfRowHashes); + + Iterator iterator = null; + if ( setOfRowHashes == null ) { + iterator = Iterators.emptyIterator(); + } else { + iterator = setOfRowHashes.iterator(); + } + final Iterator matchedRowIds = iterator; + return new PreemptiveIterator>() { + + private Map nextMap; + + @SuppressWarnings("unchecked") + @Override + protected boolean internalHasNext() { + while(matchedRowIds.hasNext()) { + nextMap = (Map) store.get(matchedRowIds.next()); + if ( nextMap != null ) { + return true; + } + } + nextMap = null; + super.close(); + return false; + } + + @Override + protected Map internalNext() { + return nextMap; + } + }; + } + + private boolean shouldIndex(String keySpace, String columnFamily, String k) { + if ( AUTO_INDEX_COLUMNS.contains(columnFamily+":"+k) || INDEX_COLUMNS.contains(columnFamily+":"+k) ) { + return true; + } + return false; + } + + public DisposableIterator> listChildren(String keySpace, + String columnFamily, String key, DirectCacheAccess cachingManager) throws StorageClientException { + String hash = rowHash(keySpace, columnFamily, key); + LOGGER.debug("Finding {}:{}:{} as {} ",new Object[]{keySpace,columnFamily, key, hash}); + return find(keySpace, columnFamily, ImmutableMap.of(Content.PARENT_HASH_FIELD, (Object)hash), cachingManager); + } + + public DisposableIterator listAll(String keySpace, String columnFamily) { + final Iterator> entries = store.entrySet().iterator(); + final String keyMatch = keySpace+":"+columnFamily+":"; + return new PreemptiveIterator() { + + private SparseRow nextRow = null; + + 
@SuppressWarnings("unchecked") + @Override + protected boolean internalHasNext() { + while(entries.hasNext()) { + Entry e = entries.next(); + if ( e.getKey().startsWith(keyMatch)) { + MapnextMap = (Map) e.getValue(); + if ( nextMap != null ) { + nextRow = new SparseMapRow(e.getKey(),nextMap); + return true; + } + } + } + nextRow = null; + super.close(); + return false; + } + + @Override + protected SparseRow internalNext() { + return nextRow; + } + }; + } + + public long allCount(String keySpace, String columnFamily) { + long count = 0; + DisposableIterator allRows = listAll(keySpace, columnFamily); + while (allRows.hasNext()) { + allRows.next(); + count++; + } + return count; + } + + public void setStorageClientListener(StorageClientListener storageClientListener) { + // TODO Auto-generated method stub + + } + + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClientPool.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClientPool.java similarity index 75% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClientPool.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClientPool.java index 2c1eb028..4ab3af44 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClientPool.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClientPool.java @@ -17,19 +17,19 @@ */ package org.sakaiproject.nakamura.lite.storage.mem; +import java.util.Map; + import org.apache.commons.pool.BasePoolableObjectFactory; import org.apache.commons.pool.PoolableObjectFactory; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Service; -import org.sakaiproject.nakamura.api.lite.CacheHolder; import org.sakaiproject.nakamura.api.lite.StorageCacheManager; 
-import org.sakaiproject.nakamura.lite.storage.AbstractClientConnectionPool; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.AbstractClientConnectionPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; +import com.google.common.collect.Maps; @Component(enabled = false, metatype = true, inherit = true) @Service(value = StorageClientPool.class) @@ -37,12 +37,12 @@ public class MemoryStorageClientPool extends AbstractClientConnectionPool { public static class ClientConnectionPoolFactory extends BasePoolableObjectFactory { - private Map> store; + private Map store; private Map properties; private MemoryStorageClientPool pool; public ClientConnectionPoolFactory(MemoryStorageClientPool pool, - Map> store, Map properties) { + Map store, Map properties) { this.store = store; this.pool = pool; this.properties = properties; @@ -77,31 +77,16 @@ public boolean validateObject(Object obj) { } - private Map> store; + private Map store; private Map properties; - private StorageCacheManager defaultStorageManagerCache; - + public MemoryStorageClientPool() { - defaultStorageManagerCache = new StorageCacheManager() { - - public Map getContentCache() { - return null; - } - - public Map getAuthorizableCache() { - return null; - } - - public Map getAccessControlCache() { - return null; - } - }; } @Activate public void activate(Map properties) throws ClassNotFoundException { this.properties = properties; - store = new ConcurrentHashMap>(); + store = Maps.newConcurrentMap(); super.activate(properties); } @@ -118,7 +103,7 @@ protected PoolableObjectFactory getConnectionPoolFactory() { public StorageCacheManager getStorageCacheManager() { - return defaultStorageManagerCache; + return null; } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/AbstractClientConnectionPool.java 
b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/AbstractClientConnectionPool.java similarity index 57% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/AbstractClientConnectionPool.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/AbstractClientConnectionPool.java index 0c5f948b..698d4089 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/AbstractClientConnectionPool.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/AbstractClientConnectionPool.java @@ -15,7 +15,9 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.storage; +package org.sakaiproject.nakamura.lite.storage.spi; + +import com.google.common.collect.ImmutableSet; import org.apache.commons.pool.PoolableObjectFactory; import org.apache.commons.pool.impl.GenericObjectPool; @@ -23,11 +25,17 @@ import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.lite.storage.spi.types.LongString; +import org.sakaiproject.nakamura.lite.storage.spi.types.StringType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Map; +import java.util.Set; @Component(componentAbstract = true) public abstract class AbstractClientConnectionPool implements StorageClientPool { @@ -53,18 +61,50 @@ public abstract class AbstractClientConnectionPool implements StorageClientPool private static final String MIN_EVICTABLE_IDLE_TIME_MILLIS = "min-evictable-idle-time-millis"; @Property(boolValue = false) private static final String 
TEST_WHILE_IDLE = "test-while-idle"; - @Property(value = "block | fail | grow ") + @Property(value = "grow") private static final String WHEN_EHAUSTED = "when-exhausted-action"; + private static final int DEFAULT_LONG_STRING_SIZE = 16*1024; + @Property(intValue = DEFAULT_LONG_STRING_SIZE) + private static final String LONG_STRING_SIZE = "long-string-size"; + + public static final String DEFAULT_FILE_STORE = "store"; + @Property(value = DEFAULT_FILE_STORE) + public static final String FS_STORE_BASE_DIR = "store-base-dir"; + + /** + * Is ok to store content and long strings in the same location as they are + * identified by sha1 hashes and so unique. If a deplorer decides they want different + * locations, they can configure. + */ + @Property(value = DEFAULT_FILE_STORE) + private static final String LONG_STRING_STORE_BASE = "long-string-base-dir"; + + + @Reference + private Configuration configuration; + + private Set indexColumns; + + + private GenericObjectPool pool; + private Set indexColumnsTypes; + public AbstractClientConnectionPool() { } @Activate public void activate(Map properties) throws ClassNotFoundException { - int maxActive = getProperty(properties.get(MAX_ACTIVE), 200); + // for testing purposes + if ( configuration == null ) { + configuration = (Configuration) properties.get(Configuration.class.getName()); + } + indexColumns = ImmutableSet.copyOf(configuration.getIndexColumnNames()); + indexColumnsTypes = ImmutableSet.copyOf(configuration.getIndexColumnTypes()); + int maxActive = StorageClientUtils.getSetting(properties.get(MAX_ACTIVE), 200); byte whenExhaustedAction = GenericObjectPool.DEFAULT_WHEN_EXHAUSTED_ACTION; String whenExhausted = (String) properties.get(WHEN_EHAUSTED); if ("fail".equals(whenExhausted)) { @@ -74,30 +114,31 @@ public void activate(Map properties) throws ClassNotFoundExcepti } else if ("block".equals(whenExhausted)) { whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_BLOCK; } - long maxWait = 
getProperty(properties.get(MAX_WAIT), 10L); - int maxIdle = getProperty(properties.get(MAX_IDLE), 5); - boolean testOnBorrow = getProperty(properties.get(TEST_ON_BORROW), true); - boolean testOnReturn = getProperty(properties.get(TEST_ON_RETURN), true); - long timeBetweenEvictionRunsMillis = getProperty( + long maxWait = StorageClientUtils.getSetting(properties.get(MAX_WAIT), 10L); + int maxIdle = StorageClientUtils.getSetting(properties.get(MAX_IDLE), 5); + boolean testOnBorrow = StorageClientUtils.getSetting(properties.get(TEST_ON_BORROW), true); + boolean testOnReturn = StorageClientUtils.getSetting(properties.get(TEST_ON_RETURN), true); + long timeBetweenEvictionRunsMillis = StorageClientUtils.getSetting( properties.get(TIME_BETWEEN_EVICTION_RUNS_MILLIS), 60000L); - int numTestsPerEvictionRun = getProperty(properties.get(NUM_TESTS_PER_EVICTION_RUN), 1000); - long minEvictableIdleTimeMillis = getProperty( + int numTestsPerEvictionRun = StorageClientUtils.getSetting(properties.get(NUM_TESTS_PER_EVICTION_RUN), 1000); + long minEvictableIdleTimeMillis = StorageClientUtils.getSetting( properties.get(MIN_EVICTABLE_IDLE_TIME_MILLIS), 10000L); - boolean testWhileIdle = getProperty(properties.get(TEST_WHILE_IDLE), false); + boolean testWhileIdle = StorageClientUtils.getSetting(properties.get(TEST_WHILE_IDLE), false); pool = new GenericObjectPool(getConnectionPoolFactory(), maxActive, whenExhaustedAction, maxWait, maxIdle, testOnBorrow, testOnReturn, timeBetweenEvictionRunsMillis, numTestsPerEvictionRun, minEvictableIdleTimeMillis, testWhileIdle); + + // set the maximum size of a string, if this is not 0, strings over this size will become files. + StringType.setLengthLimit(StorageClientUtils.getSetting(properties.get(LONG_STRING_SIZE),DEFAULT_LONG_STRING_SIZE)); + // location of the long string store. 
+ LongString + .setBase(StorageClientUtils.getSetting(properties.get(LONG_STRING_STORE_BASE), + StorageClientUtils.getSetting(properties.get(FS_STORE_BASE_DIR), + DEFAULT_FILE_STORE))); } - @SuppressWarnings("unchecked") - private T getProperty(Object o, T l) { - if (o == null) { - return l; - } - return (T) o; - } protected abstract PoolableObjectFactory getConnectionPoolFactory(); @@ -112,6 +153,15 @@ public void deactivate(Map properties) { } } + public Set getIndexColumns() { + return indexColumns; + } + + public Set getIndexColumnsTypes() { + return indexColumnsTypes; + } + + /* * (non-Javadoc) * @@ -120,9 +170,11 @@ public void deactivate(Map properties) { */ public StorageClient getClient() throws ClientPoolException { try { - return (StorageClient) pool.borrowObject(); + StorageClient client = (StorageClient) pool.borrowObject(); + LOGGER.debug("Borrowed storage client pool client:" + client); + return client; } catch (Exception e) { - LOGGER.warn(e.getMessage(),e); + LOGGER.warn("Failed To Borrow connection from pool {} ",e.getMessage()); throw new ClientPoolException("Failed To Borrow connection from pool ", e); } } @@ -137,10 +189,11 @@ public void releaseClient(StorageClient client) { try { if (client != null) { pool.returnObject(client); + LOGGER.debug("Released storage client pool client:" + client); } } catch (Exception e) { LOGGER.warn("Failed to close connection " + e.getMessage(), e); } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/CachableDisposableIterator.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/CachableDisposableIterator.java new file mode 100644 index 00000000..e7dce3b9 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/CachableDisposableIterator.java @@ -0,0 +1,17 @@ +package org.sakaiproject.nakamura.lite.storage.spi; + +import java.util.Map; + +/** + * These iterators can be cached. 
They must provide a results map that may be + * used later to create a new iterator from the cached results. + * + * @author ieb + * + * @param + */ +public interface CachableDisposableIterator extends DisposableIterator { + + Map getResultsMap(); + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/ConcurrentLRUMap.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/ConcurrentLRUMap.java similarity index 99% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/ConcurrentLRUMap.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/ConcurrentLRUMap.java index 243a35c8..d1e1e449 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/ConcurrentLRUMap.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/ConcurrentLRUMap.java @@ -15,7 +15,7 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.storage; +package org.sakaiproject.nakamura.lite.storage.spi; import java.util.ArrayList; import java.util.Collection; diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/DirectCacheAccess.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/DirectCacheAccess.java new file mode 100644 index 00000000..cc625b5f --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/DirectCacheAccess.java @@ -0,0 +1,21 @@ +package org.sakaiproject.nakamura.lite.storage.spi; + +import org.sakaiproject.nakamura.api.lite.CacheHolder; + +/** + * Caching Managers that implement this interface use the same keys in the cache + * as is used by the underlying storage client and so the underlying storage + * client can directly access the cache with its own cache keys. 
+ * + * @author ieb + * + */ +public interface DirectCacheAccess { + + void putToCache(String cacheKey, CacheHolder cacheHolder); + + void putToCache(String cacheKey, CacheHolder cacheHolder, boolean respectDeletes); + + CacheHolder getFromCache(String cacheKey); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/Disposable.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/Disposable.java new file mode 100644 index 00000000..f1ab7cb1 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/Disposable.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.spi; + +/** + * Things that are disposable, must be closed. If they are not, resources will + * be exhausted and OOM errors may appear. This interface is not an API + * interface and its an internal responsibility of this bundle/jar to dispose of + * a Disposable. No code should require clients of this bundle to dispose of a + * Disposable. 
+ * + * @author ieb + * + */ +public interface Disposable { + + void close(); + + void setDisposer(Disposer disposer); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/DisposableIterator.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/DisposableIterator.java new file mode 100644 index 00000000..7d900d0d --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/DisposableIterator.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.spi; + +import java.util.Iterator; + +/** + * Disposable Iterators must be closed when they have been used. If they are + * registered with a disposer, they should be disposed of by the disposer and + * there is no requirement for the user of the Iterator to dispose of the + * iterator. Failure to dispose a DispsableIterator will cause resource + * exhaustion. eg out of SQL cursors, out of files, out of memory. 
+ * + * @author ieb + * + * @param + */ +public interface DisposableIterator extends Iterator, Disposable { +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/Disposer.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/Disposer.java new file mode 100644 index 00000000..efa82cbe --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/Disposer.java @@ -0,0 +1,24 @@ +package org.sakaiproject.nakamura.lite.storage.spi; + +/** + * Things that implement Disposer can dispose {@link Disposable}s + * @author ieb + * + */ +public interface Disposer { + + /** + * Unregister the disposable + * @param disposable + */ + void unregisterDisposable(Disposable disposable); + + /** + * register the Disposable. + * @param + * @param disposable + * @return the disposable just registered. + */ + T registerDisposable(T disposable); + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/ReferenceCountThreadLocal.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/ReferenceCountThreadLocal.java similarity index 97% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/ReferenceCountThreadLocal.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/ReferenceCountThreadLocal.java index cb1ee21e..b21c85bf 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/ReferenceCountThreadLocal.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/ReferenceCountThreadLocal.java @@ -15,7 +15,7 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. 
*/ -package org.sakaiproject.nakamura.lite.storage; +package org.sakaiproject.nakamura.lite.storage.spi; /** * This class need to bind weakly to a thread, so when the thread dies, the diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/RowHasher.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/RowHasher.java new file mode 100644 index 00000000..e38cbac4 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/RowHasher.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.spi; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; + +/** + * Things that implement RowHasher hash storage locations into a more compact form. + * @author ieb + * + */ +public interface RowHasher { + + /** + * Generate rowIds. 
+ * @param keySpace + * @param columnFamily + * @param key + * @return an id for the row based on the parameters and a hashing/encoding strategy + * @throws StorageClientException + */ + String rowHash(String keySpace, String columnFamily, String key) throws StorageClientException; + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/SparseMapRow.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/SparseMapRow.java new file mode 100644 index 00000000..7c739a78 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/SparseMapRow.java @@ -0,0 +1,25 @@ +package org.sakaiproject.nakamura.lite.storage.spi; + +import java.util.Map; + + +public class SparseMapRow implements SparseRow { + + private String rid; + private Map values; + + public SparseMapRow(String rid, Map values) { + this.rid = rid; + this.values = values; + } + + + public String getRowId() { + return rid; + } + + public Map getProperties() { + return values; + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/SparseRow.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/SparseRow.java new file mode 100644 index 00000000..1760c91a --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/SparseRow.java @@ -0,0 +1,11 @@ +package org.sakaiproject.nakamura.lite.storage.spi; + +import java.util.Map; + +public interface SparseRow { + + String getRowId(); + + Map getProperties(); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClient.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClient.java new file mode 100644 index 00000000..c2bb1862 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClient.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.spi; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; + +/** + * Implementations of the SPI need to implement a {@link StorageClientPool} that + * pools {@link StorageClient}s. + * + * @author ieb + * + */ +public interface StorageClient { + + /** + * Where an object is deleted, in the repository but still exists in the storage + * It will be marked with "Y" in the deleted field. + * @since 1.5 + */ + public static final String DELETED_FIELD = Repository.SYSTEM_PROP_PREFIX + "deleted"; + /** + * true, for above. + * @since 1.5 + */ + public static final String TRUE = "Y"; + + /** + * Lookup an object by key + * @param keySpace the keyspace to search + * @param columnFamily the group of columns we're considering + * @param key the key of the row + * @return the key value pairs in the row key or null + * @throws StorageClientException + */ + Map get(String keySpace, String columnFamily, String key) + throws StorageClientException; + + /** + * Insert or update a row in the store. 
+ * @param keySpace the keyspace to search + * @param columnFamily the group of columns we're considering + * @param key the key of the row + * @param values the Map of column values to associate with this key + * @param probablyNew whether or not the row is probably new + * @throws StorageClientException + */ + void insert(String keySpace, String columnFamily, String key, Map values, boolean probablyNew) + throws StorageClientException; + + /** + * Remove a row in the store. + * @param keySpace the keyspace to search + * @param columnFamily the group of columns we're considering + * @param key the key of the row + * @throws StorageClientException + */ + void remove(String keySpace, String columnFamily, String key) throws StorageClientException; + + /** + * Get an {@link InputStream} to read a stream of content. + * @param keySpace the keyspace to search + * @param columnFamily the group of columns we're considering + * @param contentId the id of the content item + * @param contentBlockId the block offset + * @param streamId the id of the correct stream for this piece of content + * @param content the properties of the content item + * @return an stream that will read the block + * @throws StorageClientException + * @throws AccessDeniedException + * @throws IOException + */ + InputStream streamBodyOut(String keySpace, String columnFamily, String contentId, + String contentBlockId, String streamId, Map content) throws StorageClientException, + AccessDeniedException, IOException; + + /** + * Write in the body of a piece of content. + * @param keySpace the keyspace to search + * @param columnFamily the group of columns we're considering + * @param contentId the id of the content item + * @param contentBlockId the block offset + * @param streamId the id of the correct stream for this piece of content + * @param content the properties of the content item + * @param in a stream pointing to the data + * @return the content item after the write it will be modified. 
+ * @throws StorageClientException + * @throws AccessDeniedException + * @throws IOException + */ + Map streamBodyIn(String keySpace, String columnFamily, String contentId, + String contentBlockId, String streamId, Map content, InputStream in) + throws StorageClientException, AccessDeniedException, IOException; + + /** + * Search for a piece of content. + * + * @param keySpace + * the keyspace to search + * @param authorizableColumnFamily + * the id of the column family + * @param properties + * column and values to search + * @param cachingManager + * if set to a CachingManagerImpl that implements DirectCacheAccess, + * the cache will be consulted before accessing the storage. + * @return an iterator of results + * @throws StorageClientException + */ + DisposableIterator> find(String keySpace, String authorizableColumnFamily, + Map properties, DirectCacheAccess cachingManager) throws StorageClientException; + + /** + * Close this client. + */ + void close(); + + /** + * Find all of the children of a certain node. + * @param keySpace + * @param columnFamily + * @param key the row id + * @return an iterator of content items below this content item + * @throws StorageClientException + */ + DisposableIterator> listChildren(String keySpace, String columnFamily, + String key, DirectCacheAccess cachingManager) throws StorageClientException; + + /** + * Does this content item have a stream body by this id? + * @param content + * @param streamId + * @return whether or not the stream exists for this content item + */ + boolean hasBody( Map content, String streamId); + + /** + * List all objects of the type + * @param keySpace the key space + * @param columnFamily + * @return a Disposable iterator containing all raw objects of the type in question. + * @throws StorageClientException + */ + DisposableIterator listAll(String keySpace, String columnFamily) throws StorageClientException; + + /** + * Count all the objects in a column Family. 
+ * @param keySpace + * @param columnFamily + * @return the number of objects + * @throws StorageClientException + */ + long allCount(String keySpace, String columnFamily) throws StorageClientException; + + void setStorageClientListener(StorageClientListener storageClientListener); + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClientListener.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClientListener.java new file mode 100644 index 00000000..d98c2a2b --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClientListener.java @@ -0,0 +1,63 @@ +package org.sakaiproject.nakamura.lite.storage.spi; + +import java.util.Map; + +/** + * Things that implement this listen to the StorageClient for changes made to + * objects. There is normally only one listener registered at a time to the + * StorageClient instance, and the storage client is not normally shared between + * threads. The StorageClient listener may need to maintain state between calls + * and should ensure that it does not generate a memory leak or throw exceptions + * to any of its method that might get in the way of normal processing. + * + * @author ieb + * + */ +public interface StorageClientListener { + + /** + * Notification the key was staged for deletion. + * + * @param keySpace + * @param columnFamily + * @param key + */ + void delete(String keySpace, String columnFamily, String key); + + /** + * Notification of the state of the map after it has been updated. + * + * @param keySpace + * @param columnFamily + * @param key + * @param mapAfter + */ + void after(String keySpace, String columnFamily, String key, Map mapAfter); + + /** + * Notification of the state of the map before being updated. 
+ * + * @param keySpace + * @param columnFamily + * @param key + * @param mapBefore + */ + void before(String keySpace, String columnFamily, String key, Map mapBefore); + + /** + * The whole transaction since the last begin() has been committed. + */ + void commit(); + + /** + * A new transaction has been started. + */ + void begin(); + + /** + * The whole transaction has been rolled back, almost certainly due to a + * problem with the data. + */ + void rollback(); + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/StorageClientPool.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClientPool.java similarity index 83% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/StorageClientPool.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClientPool.java index f2a7bbba..53911b31 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/StorageClientPool.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/StorageClientPool.java @@ -15,11 +15,16 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.storage; +package org.sakaiproject.nakamura.lite.storage.spi; import org.sakaiproject.nakamura.api.lite.ClientPoolException; import org.sakaiproject.nakamura.api.lite.StorageCacheManager; +/** + * This is the main SPI of the Storage SPI. This should be implemented as an OSGi service. + * @author ieb + * + */ public interface StorageClientPool { /** @@ -28,6 +33,9 @@ public interface StorageClientPool { */ StorageClient getClient() throws ClientPoolException; + /** + * @return the object cache for this datastore. 
+ */ StorageCacheManager getStorageCacheManager(); } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentHelper.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/BlockContentHelper.java similarity index 92% rename from src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentHelper.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/BlockContentHelper.java index a076bf4b..fa0f68ab 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/content/BlockContentHelper.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/BlockContentHelper.java @@ -15,7 +15,7 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.content; +package org.sakaiproject.nakamura.lite.storage.spi.content; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; @@ -36,4 +36,6 @@ Map writeBody(String keySpace, String contentColumnFamily, Strin InputStream readBody(String keySpace, String contentColumnFamily, String contentBlockId, String streamId, int nBlocks) throws StorageClientException, AccessDeniedException; + boolean hasBody(Map content, String streamId); + } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/content/BlockSetContentHelper.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/BlockSetContentHelper.java similarity index 84% rename from src/main/java/org/sakaiproject/nakamura/lite/content/BlockSetContentHelper.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/BlockSetContentHelper.java index 7a46b1cf..aae89c20 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/content/BlockSetContentHelper.java +++ 
b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/BlockSetContentHelper.java @@ -15,38 +15,41 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.content; +package org.sakaiproject.nakamura.lite.storage.spi.content; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; +import org.sakaiproject.nakamura.api.lite.Repository; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; import org.sakaiproject.nakamura.api.lite.content.Content; -import org.sakaiproject.nakamura.lite.storage.StorageClient; +import org.sakaiproject.nakamura.lite.content.BlockContentInputStream; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.InputStream; -import java.util.Map; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; public class BlockSetContentHelper implements BlockContentHelper { private static final Logger LOGGER = LoggerFactory.getLogger(BlockSetContentHelper.class); + public static final String CONTENT_BLOCK_ID = Repository.SYSTEM_PROP_PREFIX + "cblockId"; /** * The length of this block */ - public static final String BLOCK_LENGTH_FIELD_STUB = "blockLength:"; + public static final String BLOCK_LENGTH_FIELD_STUB = Repository.SYSTEM_PROP_PREFIX + "blockLength:"; /** * the stub of all bodies 0..numblocks */ - public static final String BODY_FIELD_STUB = "body:"; + public static final String BODY_FIELD_STUB = Repository.SYSTEM_PROP_PREFIX + "body:"; /** * The number of blocks in this block set */ - public static 
final String NUMBLOCKS_FIELD = "numblocks"; + public static final String NUMBLOCKS_FIELD = Repository.SYSTEM_PROP_PREFIX + "numblocks"; public static final int DEFAULT_BLOCK_SIZE = 1024 * 1024; public static final int DEFAULT_MAX_CHUNKS_PER_BLOCK = 64; @@ -96,7 +99,9 @@ public Map writeBody(String keySpace, String contentColumnFamily length = length + bufferLength; lastBlockWrite = i; client.insert(keySpace, contentColumnFamily, key, ImmutableMap.of(Content.UUID_FIELD, - (Object)contentId, NUMBLOCKS_FIELD, + (Object)contentId, + CONTENT_BLOCK_ID, key, + NUMBLOCKS_FIELD, bodyNum + 1, blockLengthKey, bufferLength, bodyKey, saveBuffer), false); bodyNum++; @@ -126,4 +131,7 @@ public InputStream readBody(String keySpace, String contentColumnFamily, String nBlocks); } + public boolean hasBody(Map content, String streamId) { + return content.containsKey(StorageClientUtils.getAltField(Content.BLOCKID_FIELD, streamId)); + } } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/content/FileStreamContentHelper.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/FileStreamContentHelper.java similarity index 83% rename from src/main/java/org/sakaiproject/nakamura/lite/content/FileStreamContentHelper.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/FileStreamContentHelper.java index 93bc93e6..a08d87a0 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/content/FileStreamContentHelper.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/FileStreamContentHelper.java @@ -15,15 +15,17 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. 
*/ -package org.sakaiproject.nakamura.lite.content; +package org.sakaiproject.nakamura.lite.storage.spi.content; import com.google.common.collect.Maps; import org.apache.commons.io.IOUtils; +import org.sakaiproject.nakamura.api.lite.Repository; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.content.Content; -import org.sakaiproject.nakamura.lite.storage.RowHasher; +import org.sakaiproject.nakamura.lite.storage.spi.AbstractClientConnectionPool; +import org.sakaiproject.nakamura.lite.storage.spi.RowHasher; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,16 +40,14 @@ public class FileStreamContentHelper implements StreamedContentHelper { - private static final String DEFAULT_FILE_STORE = "store"; - private static final String CONFIG_STOREBASE = "store-base-dir"; private static final Logger LOGGER = LoggerFactory.getLogger(FileStreamContentHelper.class); - private static final String STORE_LOCATION = "bodyLocation"; + private static final String STORE_LOCATION_FIELD = Repository.SYSTEM_PROP_PREFIX + "bodyLocation"; private String fileStore; private RowHasher rowHasher; public FileStreamContentHelper(RowHasher rowHasher, Map properties) { - fileStore = StorageClientUtils.getSetting(properties.get(CONFIG_STOREBASE), - DEFAULT_FILE_STORE); + fileStore = StorageClientUtils.getSetting(properties.get(AbstractClientConnectionPool.FS_STORE_BASE_DIR), + AbstractClientConnectionPool.DEFAULT_FILE_STORE); this.rowHasher = rowHasher; } @@ -70,7 +70,7 @@ public Map writeBody(String keySpace, String columnFamily, Strin Map metadata = Maps.newHashMap(); metadata.put(StorageClientUtils.getAltField(Content.LENGTH_FIELD, streamId), length); metadata.put(StorageClientUtils.getAltField(Content.BLOCKID_FIELD, streamId), contentBlockId); - metadata.put(StorageClientUtils.getAltField(STORE_LOCATION, streamId), path); + 
metadata.put(StorageClientUtils.getAltField(STORE_LOCATION_FIELD, streamId), path); return metadata; } @@ -87,7 +87,7 @@ private String getPath(String keySpace, String columnFamily, String contentBlock public InputStream readBody(String keySpace, String columnFamily, String contentBlockId, String streamId, Map content) throws IOException { - String path = (String) content.get(StorageClientUtils.getAltField(STORE_LOCATION, streamId)); + String path = (String) content.get(StorageClientUtils.getAltField(STORE_LOCATION_FIELD, streamId)); LOGGER.debug("Reading from {} as body of {}:{}:{} ", new Object[] { path, keySpace, columnFamily, contentBlockId }); File file = new File(fileStore + "/" + path); @@ -98,4 +98,11 @@ public InputStream readBody(String keySpace, String columnFamily, String content } } + public boolean hasStream(Map content, String streamId ) { + String path = (String) content.get(StorageClientUtils.getAltField(STORE_LOCATION_FIELD, streamId)); + File file = new File(fileStore + "/" + path); + return file.exists(); + } + + } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/content/StreamedContentHelper.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/StreamedContentHelper.java similarity index 91% rename from src/main/java/org/sakaiproject/nakamura/lite/content/StreamedContentHelper.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/StreamedContentHelper.java index 2b0e857b..afeb890b 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/content/StreamedContentHelper.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/content/StreamedContentHelper.java @@ -15,7 +15,7 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. 
*/ -package org.sakaiproject.nakamura.lite.content; +package org.sakaiproject.nakamura.lite.storage.spi.content; import org.sakaiproject.nakamura.api.lite.StorageClientException; @@ -32,4 +32,7 @@ Map writeBody(String keySpace, String columnFamily, String conte InputStream readBody(String keySpace, String columnFamily, String contentBlockId, String streamId, Map content) throws IOException; + boolean hasStream(Map content, String streamId); + + } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/BigDecimalArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigDecimalArrayType.java similarity index 84% rename from src/main/java/org/sakaiproject/nakamura/lite/types/BigDecimalArrayType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigDecimalArrayType.java index 3db762a5..7adfdde2 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/BigDecimalArrayType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigDecimalArrayType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -32,4 +32,8 @@ public Class getTypeClass() { return BigDecimal[].class; } + public boolean accepts(Object object) { + return (object instanceof BigDecimal[]); + } + } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/BigDecimalType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigDecimalType.java similarity index 78% rename from src/main/java/org/sakaiproject/nakamura/lite/types/BigDecimalType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigDecimalType.java index 82c16eaa..3a0e9469 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/BigDecimalType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigDecimalType.java @@ 
-1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -22,5 +22,10 @@ public BigDecimal load(DataInputStream in) throws IOException { public Class getTypeClass() { return BigDecimal.class; } + + public boolean accepts(Object object) { + return (object instanceof BigDecimal); + } + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigIntegerArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigIntegerArrayType.java new file mode 100644 index 00000000..8a69710f --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigIntegerArrayType.java @@ -0,0 +1,39 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.math.BigInteger; + +public class BigIntegerArrayType implements Type { + + public int getTypeId() { + return 1009; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + BigInteger[] values = (BigInteger[]) object; + dos.writeInt(values.length); + for ( BigInteger s : values) { + dos.writeUTF(s.toString()); + } + } + + public BigInteger[] load(DataInputStream in) throws IOException { + int l = in.readInt(); + BigInteger[] values = new BigInteger[l]; + for ( int i = 0; i < l; i++ ) { + values[i] = new BigInteger(in.readUTF()); + } + return values; + } + + public Class getTypeClass() { + return BigInteger[].class; + } + + public boolean accepts(Object object) { + return (object instanceof BigInteger[]); + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigIntegerType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigIntegerType.java new file mode 100644 index 00000000..dec7d271 --- /dev/null +++ 
b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BigIntegerType.java @@ -0,0 +1,31 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.math.BigInteger; + +public class BigIntegerType implements Type { + + public int getTypeId() { + return 9; + } + + public void save(DataOutputStream dos, Object value) throws IOException { + dos.writeUTF(((BigInteger)value).toString()); + } + + public BigInteger load(DataInputStream in) throws IOException { + return new BigInteger(in.readUTF()); + } + + public Class getTypeClass() { + return BigInteger.class; + } + + public boolean accepts(Object object) { + return (object instanceof BigInteger); + } + + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BooleanArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BooleanArrayType.java new file mode 100644 index 00000000..f97f2ef5 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BooleanArrayType.java @@ -0,0 +1,49 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public class BooleanArrayType implements Type { + + public int getTypeId() { + return 1005; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + Boolean[] values; + if ( object instanceof boolean[] ) { + // sadly, autoboxing does not work for primitive types. 
+ boolean[] primitiveArray = (boolean[]) object; + values = new Boolean[primitiveArray.length]; + for ( int i = 0; i < primitiveArray.length; i++ ) { + values[i] = primitiveArray[i]; + } + } else { + values = (Boolean[]) object; + } + dos.writeInt(values.length); + for ( Boolean s : values) { + dos.writeBoolean(s); + } + } + + public Boolean[] load(DataInputStream in) throws IOException { + int l = in.readInt(); + Boolean[] values = new Boolean[l]; + for ( int i = 0; i < l; i++ ) { + values[i] = in.readBoolean(); + } + return values; + } + + public Class getTypeClass() { + return Boolean[].class; + } + + public boolean accepts(Object object) { + return (object instanceof Boolean[] || object instanceof boolean[]); + } + + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/BooleanType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BooleanType.java similarity index 76% rename from src/main/java/org/sakaiproject/nakamura/lite/types/BooleanType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BooleanType.java index abdd8625..814429af 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/BooleanType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/BooleanType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -21,5 +21,10 @@ public Boolean load(DataInputStream in) throws IOException { public Class getTypeClass() { return Boolean.class; } + + public boolean accepts(Object object) { + return (object instanceof Boolean); + } + } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/CalendarArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/CalendarArrayType.java similarity index 88% rename from src/main/java/org/sakaiproject/nakamura/lite/types/CalendarArrayType.java rename 
to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/CalendarArrayType.java index d07fddf1..8a5b4188 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/CalendarArrayType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/CalendarArrayType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -40,4 +40,8 @@ public Class getTypeClass() { return Calendar[].class; } + public boolean accepts(Object object) { + return (object instanceof Calendar[]); + } + } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/CalendarType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/CalendarType.java similarity index 86% rename from src/main/java/org/sakaiproject/nakamura/lite/types/CalendarType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/CalendarType.java index c6b70831..7d44a41a 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/CalendarType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/CalendarType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -32,4 +32,8 @@ public Class getTypeClass() { return Calendar.class; } + public boolean accepts(Object object) { + return (object instanceof Calendar); + } + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/DoubleArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/DoubleArrayType.java new file mode 100644 index 00000000..f03888d2 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/DoubleArrayType.java @@ -0,0 +1,47 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + 
+import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public class DoubleArrayType implements Type { + + public int getTypeId() { + return 1003; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + Double[] values; + if ( object instanceof double[] ) { + // sadly, autoboxing does not work for primitive types. + double[] primitiveArray = (double[]) object; + values = new Double[primitiveArray.length]; + for ( int i = 0; i < primitiveArray.length; i++ ) { + values[i] = primitiveArray[i]; + } + } else { + values = (Double[]) object; + } + dos.writeInt(values.length); + for ( Double s : values) { + dos.writeDouble(s); + } + } + + public Double[] load(DataInputStream in) throws IOException { + int l = in.readInt(); + Double[] values = new Double[l]; + for ( int i = 0; i < l; i++ ) { + values[i] = in.readDouble(); + } + return values; + } + + public Class getTypeClass() { + return Double[].class; + } + + public boolean accepts(Object object) { + return (object instanceof Double[] || object instanceof double[]); + } +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/DoubleType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/DoubleType.java similarity index 77% rename from src/main/java/org/sakaiproject/nakamura/lite/types/DoubleType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/DoubleType.java index 4c6b153b..7ab17da3 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/DoubleType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/DoubleType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -22,4 +22,7 @@ public Class getTypeClass() { return Double.class; } + public boolean accepts(Object object) { + return (object instanceof Double); + 
} } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/ISO8601DateArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/ISO8601DateArrayType.java new file mode 100644 index 00000000..9f33817d --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/ISO8601DateArrayType.java @@ -0,0 +1,49 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.TimeZone; + +import org.sakaiproject.nakamura.api.lite.util.ISO8601Date; + +public class ISO8601DateArrayType implements Type { + + public int getTypeId() { + return 1008; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + ISO8601Date[] values = (ISO8601Date[]) object; + dos.writeInt(values.length); + for ( ISO8601Date calendar : values) { + dos.writeLong(calendar.getTimeInMillis()); + dos.writeUTF(calendar.getTimeZone().getID()); + dos.writeBoolean(calendar.isDate()); + } + } + + public ISO8601Date[] load(DataInputStream in) throws IOException { + int l = in.readInt(); + ISO8601Date[] values = new ISO8601Date[l]; + for ( int i = 0; i < l; i++ ) { + long millis = in.readLong(); + TimeZone zone = TimeZone.getTimeZone(in.readUTF()); + boolean date = in.readBoolean(); + values[i] = new ISO8601Date(); + values[i].setTimeInMillis(millis); + values[i].setTimeZone(zone); + values[i].setDate(date); + } + return values; + } + + public Class getTypeClass() { + return ISO8601Date[].class; + } + + public boolean accepts(Object object) { + return (object instanceof ISO8601Date[]); + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/ISO8601DateType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/ISO8601DateType.java new file mode 100644 index 00000000..c4994679 --- /dev/null +++ 
b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/ISO8601DateType.java @@ -0,0 +1,42 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.TimeZone; + +import org.sakaiproject.nakamura.api.lite.util.ISO8601Date; + +public class ISO8601DateType implements Type { + + public int getTypeId() { + return 8; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + ISO8601Date calendar = (ISO8601Date) object; + dos.writeLong(calendar.getTimeInMillis()); + dos.writeUTF(calendar.getTimeZone().getID()); + dos.writeBoolean(calendar.isDate()); + } + + public ISO8601Date load(DataInputStream in) throws IOException { + long millis = in.readLong(); + TimeZone zone = TimeZone.getTimeZone(in.readUTF()); + boolean date = in.readBoolean(); + ISO8601Date calendar = new ISO8601Date(); + calendar.setTimeInMillis(millis); + calendar.setTimeZone(zone); + calendar.setDate(date); + return calendar; + } + + public Class getTypeClass() { + return ISO8601Date.class; + } + + public boolean accepts(Object object) { + return (object instanceof ISO8601Date); + } + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/IntegerArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/IntegerArrayType.java new file mode 100644 index 00000000..8f9314a3 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/IntegerArrayType.java @@ -0,0 +1,47 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public class IntegerArrayType implements Type { + + public int getTypeId() { + return 1002; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + Integer[] values; + if ( object instanceof int[] ) { + // sadly, 
autoboxing does not work for primitive types. + int[] primitiveArray = (int[]) object; + values = new Integer[primitiveArray.length]; + for ( int i = 0; i < primitiveArray.length; i++ ) { + values[i] = primitiveArray[i]; + } + } else { + values = (Integer[]) object; + } + dos.writeInt(values.length); + for ( Integer s : values) { + dos.writeInt(s); + } + } + + public Integer[] load(DataInputStream in) throws IOException { + Integer l = in.readInt(); + Integer[] values = new Integer[l]; + for ( int i = 0; i < l; i++ ) { + values[i] = in.readInt(); + } + return values; + } + + public Class getTypeClass() { + return Integer[].class; + } + + public boolean accepts(Object object) { + return (object instanceof Integer[] || object instanceof int[]); + } +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/IntegerType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/IntegerType.java similarity index 76% rename from src/main/java/org/sakaiproject/nakamura/lite/types/IntegerType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/IntegerType.java index a7cc6708..680b5f41 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/IntegerType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/IntegerType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -21,5 +21,9 @@ public Integer load(DataInputStream in) throws IOException { public Class getTypeClass() { return Integer.class; } + + public boolean accepts(Object object) { + return (object instanceof Integer); + } } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongArrayType.java new file mode 100644 index 00000000..3fabe4b8 --- /dev/null +++ 
b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongArrayType.java @@ -0,0 +1,47 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public class LongArrayType implements Type { + + public int getTypeId() { + return 1001; + } + + public void save(DataOutputStream dos, Object object ) throws IOException { + Long[] values; + if ( object instanceof long[] ) { + // sadly, autoboxing does not work for primitive types. + long[] primitiveArray = (long[]) object; + values = new Long[primitiveArray.length]; + for ( int i = 0; i < primitiveArray.length; i++ ) { + values[i] = primitiveArray[i]; + } + } else { + values = (Long[]) object; + } + dos.writeInt(values.length); + for ( Long s : values) { + dos.writeLong(s); + } + } + + public Long[] load(DataInputStream in) throws IOException { + int l = in.readInt(); + Long[] values = new Long[l]; + for ( int i = 0; i < l; i++ ) { + values[i] = in.readLong(); + } + return values; + } + + public Class getTypeClass() { + return Long[].class; + } + + public boolean accepts(Object object) { + return (object instanceof Long[] || object instanceof long[]); + } +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongString.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongString.java new file mode 100644 index 00000000..cdf69871 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongString.java @@ -0,0 +1,113 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import org.apache.commons.io.IOUtils; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +import java.io.File; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; +import 
java.lang.ref.WeakReference; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; + +/** + * A string that saves to a file and does not appear in memory. However, there is no garbage collection and strings are immutable at present. + */ +public class LongString { + + + private static final Logger LOGGER = LoggerFactory.getLogger(LongString.class); + private String location; + private WeakReference value; + private long lastModifed = -1; + private static String base; + + + LongString(String location) { + this.location = location; + } + + public static LongString create(String content) throws IOException { + String id = StorageClientUtils.getUuid(); + Calendar c = new GregorianCalendar(); + c.setTimeInMillis(System.currentTimeMillis()); + int year = c.get(Calendar.YEAR); + int month = c.get(Calendar.MONTH); + String location = year + "/" + month + "/" + id.substring(0, 2) + "/" + id.substring(2, 4) + + "/" + id.substring(4, 6) + "/" + id; + LongString ls = new LongString(location); + ls.update(content,true); + return ls; + + } + + public void update(String content, boolean isnew) throws IOException { + File f = new File(base, location); + if ( isnew && f.exists()) { + throw new IOException("LongString Storage file at location "+location+" already exists, this should not happen, nothing stored"); + } + if (!f.getParentFile().exists() ) { + if (!f.getParentFile().mkdirs() ) { + throw new IOException("Failed to create LongString storage space at "+location); + } + } + FileWriter fw = new FileWriter(f); + fw.write(content); + fw.close(); + // re-create the file to ensure the values are updated. 
+ f = new File(base, location); + lastModifed = f.lastModified(); + } + + @Override + public boolean equals(Object obj) { + if ( obj instanceof LongString ) { + return location.equals(((LongString) obj).location); + } + return false; + } + + @Override + public int hashCode() { + return location.hashCode(); + } + + @Override + @SuppressWarnings(value="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE", justification="Invalid report") + public String toString() { + if ( value == null || value.get() == null || lastModifed < 0 ) { + try { + File f = new File(base, location); + if ( value == null || value.get() == null || lastModifed < f.lastModified() ) { + FileReader fr = new FileReader(f); + String v = IOUtils.toString(fr); + fr.close(); + value = new WeakReference(v); + lastModifed = f.lastModified(); + return v; + } else { + return value.get(); + } + } catch ( IOException e) { + LOGGER.error(e.getMessage(),e); + return "ERROR, unable to load LongString body, see error log on server for details at "+String.valueOf(new Date()); + } + } else { + return value.get(); + } + } + + public String getLocation() { + return location; + } + + public static void setBase(String base) { + LongString.base = base; + } +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongStringArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongStringArrayType.java new file mode 100644 index 00000000..64ca11f2 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongStringArrayType.java @@ -0,0 +1,63 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public class LongStringArrayType implements Type { + + + public int getTypeId() { + return 1100; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + LongString[] values = null; + if ( object instanceof String[] ) { + 
values = new LongString[((String[]) object).length]; + int i = 0; + for ( String s : (String[])object) { + values[i++] = LongString.create(s); + } + } else { + values = (LongString[])object; + } + dos.writeInt(values.length); + for ( LongString ls : values) { + + dos.writeUTF(ls.getLocation()); + } + } + + public LongString[] load(DataInputStream in) throws IOException { + int l = in.readInt(); + LongString[] values = new LongString[l]; + for ( int i = 0; i < l; i++ ) { + values[i] = new LongString(in.readUTF()); + } + return values; + } + + public Class getTypeClass() { + return LongString[].class; + } + + public boolean accepts(Object object) { + if ( object instanceof LongString[] ) { + return true; + } + if (object instanceof String[]) { + if (StringType.getLengthLimit() > 0) { + for ( String s : (String[])object) { + if ( s.length() > StringType.getLengthLimit()) { + return true; + } + } + } + return false; + } + return false; + } + + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongStringType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongStringType.java new file mode 100644 index 00000000..04744f80 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongStringType.java @@ -0,0 +1,48 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public class LongStringType implements Type { + + public int getTypeId() { + return 100; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + LongString ls = null; + if ( object instanceof LongString ) { + ls = (LongString) object; + } else { + ls = LongString.create(String.valueOf(object)); + } + dos.writeUTF(ls.getLocation()); + } + + public LongString load(DataInputStream in) throws IOException { + return new LongString(in.readUTF()); + } + + public Class getTypeClass() { + return 
LongString.class; + } + + public boolean accepts(Object object) { + if ( object instanceof LongString ) { + return true; + } + if ( object instanceof String) { + if (StringType.getLengthLimit() > 0 && ((String) object).length() >= StringType.getLengthLimit() ) { + return true; + } + return false; + } + return false; + } + + + + + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/LongType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongType.java similarity index 76% rename from src/main/java/org/sakaiproject/nakamura/lite/types/LongType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongType.java index 95c0e837..1c7bfa93 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/LongType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/LongType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -22,4 +22,7 @@ public Class getTypeClass() { return Long.class; } + public boolean accepts(Object object) { + return (object instanceof Long); + } } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/RemovePropertyType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/RemovePropertyType.java similarity index 78% rename from src/main/java/org/sakaiproject/nakamura/lite/types/RemovePropertyType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/RemovePropertyType.java index 2d66995a..bbf34a2b 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/RemovePropertyType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/RemovePropertyType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import org.sakaiproject.nakamura.api.lite.RemoveProperty; 
@@ -23,4 +23,7 @@ public Class getTypeClass() { return RemoveProperty.class; } + public boolean accepts(Object object) { + return (object instanceof RemoveProperty); + } } diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/StringArrayType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/StringArrayType.java similarity index 62% rename from src/main/java/org/sakaiproject/nakamura/lite/types/StringArrayType.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/StringArrayType.java index 5459c33d..24fa5a69 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/StringArrayType.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/StringArrayType.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import java.io.DataInputStream; import java.io.DataOutputStream; @@ -6,6 +6,7 @@ public class StringArrayType implements Type { + public int getTypeId() { return 1000; } @@ -30,5 +31,20 @@ public String[] load(DataInputStream in) throws IOException { public Class getTypeClass() { return String[].class; } + + public boolean accepts(Object object) { + if (object instanceof String[]) { + if (StringType.getLengthLimit() > 0) { + for ( String s : (String[])object) { + if ( s.length() > StringType.getLengthLimit()) { + return false; + } + } + } + return true; + } + return false; + } + } diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/StringType.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/StringType.java new file mode 100644 index 00000000..ff50c9eb --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/StringType.java @@ -0,0 +1,50 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public class 
StringType implements Type { + + + private static int lengthLimit; + + public int getTypeId() { + return 0; + } + + public void save(DataOutputStream dos, Object object) throws IOException { + dos.writeUTF((String) object); + } + + public String load(DataInputStream in) throws IOException { + return in.readUTF(); + } + + public Class getTypeClass() { + return String.class; + } + + public boolean accepts(Object object) { + if ( object instanceof String) { + if (StringType.lengthLimit > 0 && ((String) object).length() > StringType.lengthLimit ) { + return false; + } + return true; + } + return false; + } + + public static int getLengthLimit() { + return lengthLimit; + } + + public static void setLengthLimit(int stringLengthLimit) { + StringType.lengthLimit = stringLengthLimit; + } + + + + +} diff --git a/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/Type.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/Type.java new file mode 100644 index 00000000..1e2677c5 --- /dev/null +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/Type.java @@ -0,0 +1,43 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public interface Type { + + /** + * @return the type ID of this type. Once a type ID has been assigned to an + * object type it can never be reused. + */ + int getTypeId(); + + /** + * Safe the type to a data output stream + * @param dos + * @param o + * @throws IOException + */ + void save(DataOutputStream dos, Object o) throws IOException; + + /** + * Load the type from a data output stream + * @param in + * @return + * @throws IOException + */ + T load(DataInputStream in) throws IOException; + + /** + * @return get the class of the type + */ + Class getTypeClass(); + + /** + * return true if the Type can save the object. 
+ * @param object + * @return + */ + boolean accepts(Object object); + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/Types.java b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/Types.java similarity index 60% rename from src/main/java/org/sakaiproject/nakamura/lite/types/Types.java rename to core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/Types.java index fdbc9422..26562d4f 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/Types.java +++ b/core/src/main/java/org/sakaiproject/nakamura/lite/storage/spi/types/Types.java @@ -1,4 +1,4 @@ -package org.sakaiproject.nakamura.lite.types; +package org.sakaiproject.nakamura.lite.storage.spi.types; import com.google.common.collect.ImmutableMap; @@ -29,6 +29,7 @@ public class Types { new BooleanType(), new BigDecimalType(), new DoubleType(), + new ISO8601DateType(), new CalendarType(), new StringArrayType(), new IntegerArrayType(), @@ -36,8 +37,13 @@ public class Types { new BooleanArrayType(), new BigDecimalArrayType(), new DoubleArrayType(), + new ISO8601DateArrayType(), new CalendarArrayType(), - new RemovePropertyType() + new RemovePropertyType(), + new LongStringArrayType(), + new LongStringType(), + new BigIntegerType(), + new BigIntegerArrayType() }; private static final Type UNKNOWN_TYPE = new StringType(); private static final Logger LOGGER = LoggerFactory.getLogger(Types.class); @@ -107,68 +113,105 @@ static Map> getTypeByIdMap() { * @param binaryStream * @throws IOException */ - public static void loadFromStream(String key, Map output, InputStream binaryStream) + public static void loadFromStream(String key, Map output, InputStream binaryStream, String type) throws IOException { DataInputStream dis = new DataInputStream(binaryStream); String ckey = dis.readUTF(); if (!key.equals(ckey)) { throw new IOException("Body Key does not match row key, unable to read"); } - int size = dis.readInt(); - LOGGER.debug("Reading {} items",size); - for (int i 
= 0; i < size; i++) { - String k = dis.readUTF(); - LOGGER.debug("Read key {} ",k); - output.put(k,lookupTypeById(dis.readInt()).load(dis)); + readMapFromStream(output, dis); + String cftype = null; + try { + cftype = dis.readUTF(); + } catch (IOException e) { + LOGGER.debug("No type specified"); + } + if (cftype != null && !cftype.equals(type)) { + throw new IOException( + "Object is not of expected column family, unable to read expected [" + type + + "] was [" + cftype + "]"); } LOGGER.debug("Finished Reading"); dis.close(); binaryStream.close(); } + public static void readMapFromStream(Map output, DataInputStream dis) throws IOException { + int size = dis.readInt(); + LOGGER.debug("Reading {} items", size); + for (int i = 0; i < size; i++) { + String k = dis.readUTF(); + LOGGER.debug("Read key {} ", k); + output.put(k, lookupTypeById(dis.readInt()).load(dis)); + } + } + /** * Save a map to a binary stream * + * * @param m - * expected to contain strings throughout + * expected to be keyed by string, can contain any object that + * has a type. * @return * @throws IOException */ - public static InputStream storeMapToStream(String key, Map m) + // IF you change this function you will have to change it in a way that + // either is self healing for all the data out there + // or write a migration script. Be warned, there could be billions of + // records out there, so be very careful + // Appending to record is possible, if you make the loader fail safe when + // the data isnt there. See the last writeUTF for an example. 
+ public static InputStream storeMapToStream(String key, Map m, String type) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); dos.writeUTF(key); + writeMapToStream(m, dos); + // add the type in + dos.writeUTF(type); + dos.flush(); + baos.flush(); + byte[] b = baos.toByteArray(); + baos.close(); + dos.close(); + return new ByteArrayInputStream(b); + } + + + // IF you change this function you will have to change it in a way that + // either is self healing for all the data out there + // or write a migration script. Be warned, there could be billions of + // records out there, so be very careful + // Appending to record is possible, if you make the loader fail safe when + // the data isnt there. See the last writeUTF for an example. + public static void writeMapToStream(Map m, + DataOutputStream dos) throws IOException { int size = 0; for (Entry e : m.entrySet()) { Object o = e.getValue(); - if ( o != null && !(o instanceof RemoveProperty) ) { + if (o != null && !(o instanceof RemoveProperty)) { size++; } } dos.writeInt(size); - LOGGER.debug("Write {} items",size); + LOGGER.debug("Write {} items", size); for (Entry e : m.entrySet()) { Object o = e.getValue(); - if ( o != null && !(o instanceof RemoveProperty) ) { + if (o != null && !(o instanceof RemoveProperty)) { String k = e.getKey(); - LOGGER.debug("Write {} ",k); + LOGGER.debug("Write {} ", k); dos.writeUTF(k); Type t = getTypeOfObject(o); dos.writeInt(t.getTypeId()); t.save(dos, o); } } - LOGGER.debug("Finished Writen {} items",size); - dos.flush(); - baos.flush(); - byte[] b = baos.toByteArray(); - baos.close(); - dos.close(); - return new ByteArrayInputStream(b); - } - + LOGGER.debug("Finished Writen {} items", size); + + } private static Type lookupTypeById(int typeId) { Type t = (Type) typeByIdMap.get(typeId); @@ -186,17 +229,46 @@ private static Type getTypeOfObject(Object object) { } Class c = object.getClass(); if ( 
typeMap.containsKey(c)) { - return (Type) typeMap.get(c); + Type t = typeMap.get(c); + if ( t.accepts(object) ) { + return (Type) t; + } } for ( Entry,Type> e : typeMap.entrySet()) { - Class tc = e.getKey(); - if ( tc.isAssignableFrom(c) ) { - return (Type) e.getValue(); + Type t = e.getValue(); + if ( t.accepts(object) ) { + return (Type) t; } } - LOGGER.warn("Unknown Type For Object {}, needs to be implemented ",object); + LOGGER.warn("Unknown Type For Object {}, needs to be implemented ",object.getClass()); return (Type) UNKNOWN_TYPE; } + + public static byte[] toByteArray(Object o)throws IOException{ + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + DataOutputStream dos = new DataOutputStream(baos); + + if ( o != null && !(o instanceof RemoveProperty) ) { + Type t = getTypeOfObject(o); + dos.writeInt(t.getTypeId()); + t.save(dos, o); + } + + dos.flush(); + baos.flush(); + byte[] b = baos.toByteArray(); + baos.close(); + dos.close(); + + return b; + } + + public static Object toObject(byte[] columnValue)throws IOException{ + DataInputStream dis = new DataInputStream(new ByteArrayInputStream(columnValue)); + return lookupTypeById(dis.readInt()).load(dis); + + } + diff --git a/core/src/main/resources/org/sakaiproject/nakamura/header/header.txt b/core/src/main/resources/org/sakaiproject/nakamura/header/header.txt new file mode 100644 index 00000000..6dd9417c --- /dev/null +++ b/core/src/main/resources/org/sakaiproject/nakamura/header/header.txt @@ -0,0 +1,15 @@ +Licensed to the ${holder} (${holder.acronym}) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ${holder.acronym} licenses this file +to you under the Apache License, Version ${apache.license.version} (the +"License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-${apache.license.version} + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations under the License. \ No newline at end of file diff --git a/core/src/main/resources/org/sakaiproject/nakamura/lite/shared.properties b/core/src/main/resources/org/sakaiproject/nakamura/lite/shared.properties new file mode 100644 index 00000000..eba4ad19 --- /dev/null +++ b/core/src/main/resources/org/sakaiproject/nakamura/lite/shared.properties @@ -0,0 +1,23 @@ +index-column-names = au:rep:principalName,au:type,cn:sling:resourceType,cn:sakai:pooled-content-manager,\ + cn:sakai:messagestore,cn:sakai:type,cn:sakai:marker,cn:sakai:tag-uuid,cn:sakai:contactstorepath,\ + cn:sakai:state,cn:firstName,cn:lastName,cn:_created,cn:sakai:category,cn:sakai:messagebox,cn:sakai:from,\ + cn:sakai:subject + +# /var/search/comments/discussions/threaded.json +# cn:sakai:messagestore, +# cn:sakai:type, +# cn:sakai:marker, +# *.tagged.json +# cn:sakai:tag-uuid, +# /var/contacts/findstate.json +# cn:sakai:contactstorepath, +# cn:sakai:state, +# cn:firstName, +# cn:lastName, +# content sorting +# cn:_created, +# /var/message/boxcategory.json +# cn:sakai:category, +# cn:sakai:messagebox, +# cn:sakai:from, +# cn:sakai:subject, \ No newline at end of file diff --git a/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.ddl b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.ddl new file mode 100644 index 00000000..2a51142c --- /dev/null +++ b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.ddl @@ -0,0 +1,147 @@ + +CREATE TABLE css ( + id INTEGER NOT NULL GENERATED 
ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v varchar(780) NOT NULL, + primary key(id)); +CREATE INDEX css_i ON css (rid, cid); + +CREATE TABLE au_css ( + id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v varchar(780) NOT NULL, + primary key(id)); +CREATE INDEX au_css_i ON au_css (rid, cid); + +CREATE TABLE ac_css ( + id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v varchar(780) NOT NULL, + primary key(id)); +CREATE INDEX ac_css_i ON ac_css (rid, cid); + +CREATE TABLE cn_css ( + id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v varchar(780) NOT NULL, + primary key(id)); +CREATE INDEX cn_css_i ON cn_css (rid, cid); + +CREATE TABLE lk_css ( + id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v varchar(780) NOT NULL, + primary key(id)); +CREATE INDEX lk_css_i ON lk_css (rid, cid); + +CREATE TABLE css_w ( + rid varchar(32) NOT NULL, + primary key(rid)); + + + +CREATE TABLE ac_css_w ( + rid varchar(32) NOT NULL, + primary key(rid)); + + + + + +CREATE TABLE au_css_w ( + rid varchar(32) NOT NULL, + primary key(rid)); + + +CREATE TABLE cn_css_w ( + rid varchar(32) NOT NULL, + primary key(rid)); + +CREATE TABLE lk_css_w ( + rid varchar(32) NOT NULL, + primary key(rid)); + + +CREATE TABLE css_wr ( + id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1), + cf varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + cname varchar(64) NOT NULL, + primary key(id)); + +CREATE UNIQUE INDEX css_r_cid ON css_wr (cf,cid); +CREATE UNIQUE INDEX css_r_cnam ON css_wr (cf,cname); + + + +CREATE TABLE csb ( + rid varchar(32) NOT NULL, + cid 
varchar(64) NOT NULL, + v blob, + primary key(rid,cid)); + +CREATE TABLE au_csb ( + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v blob, + primary key(rid,cid)); + +CREATE TABLE ac_csb ( + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v blob, + primary key(rid,cid)); + +CREATE TABLE cn_csb ( + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v blob, + primary key(rid,cid)); + +CREATE TABLE lk_csb ( + rid varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + v blob, + primary key(rid,cid)); + +CREATE INDEX css_locate_idx ON css (v, cid); +CREATE INDEX au_css_locate_idx ON au_css (v, cid); +CREATE INDEX ac_css_locate_idx ON ac_css (v, cid); +CREATE INDEX cn_css_locate_idx ON cn_css (v, cid); +CREATE INDEX lk_css_locate_idx ON lk_css (v, cid); + + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE css_b ( + rid varchar(32) NOT NULL, + b blob, + primary key(rid)); + +CREATE TABLE au_css_b ( + rid varchar(32) NOT NULL, + b blob, + primary key(rid)); + +CREATE TABLE ac_css_b ( + rid varchar(32) NOT NULL, + b blob, + primary key(rid)); + +CREATE TABLE cn_css_b ( + rid varchar(32) NOT NULL, + b blob, + primary key(rid)); + + +CREATE TABLE lk_css_b ( + rid varchar(32) NOT NULL, + b blob, + primary key(rid)); + + + diff --git a/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.sql b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.sql new file mode 100644 index 00000000..6597bdb9 --- /dev/null +++ b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.sql @@ -0,0 +1,165 @@ +delete-string-row = delete from css where rid = ? +delete-string-row.n.au = delete from au_css where rid = ? +delete-string-row.n.ac = delete from ac_css where rid = ? +delete-string-row.n.cn = delete from cn_css where rid = ? +delete-string-row.n.lk = delete from lk_css where rid = ? 
+select-string-row = select cid, v from css where rid = ? +select-string-row.n.au = select cid, v from au_css where rid = ? +select-string-row.n.ac = select cid, v from ac_css where rid = ? +select-string-row.n.cn = select cid, v from cn_css where rid = ? +select-string-row.n.lk = select cid, v from lk_css where rid = ? +insert-string-column = insert into css ( v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.au = insert into au_css ( v, rid, cid) values ( ?, ?, ?) +insert-string-column.n.ac = insert into ac_css ( v, rid, cid) values ( ?, ?, ?) +insert-string-column.n.cn = insert into cn_css ( v, rid, cid) values ( ?, ?, ?) +insert-string-column.n.lk = insert into lk_css ( v, rid, cid) values ( ?, ?, ?) +update-string-column = update css set v = ? where rid = ? and cid = ? +update-string-column.n.au = update au_css set v = ? where rid = ? and cid = ? +update-string-column.n.ac = update ac_css set v = ? where rid = ? and cid = ? +update-string-column.n.cn = update cn_css set v = ? where rid = ? and cid = ? +update-string-column.n.lk = update lk_css set v = ? where rid = ? and cid = ? +remove-string-column = delete from css where rid = ? and cid = ? +remove-string-column.n.au = delete from au_css where rid = ? and cid = ? +remove-string-column.n.ac = delete from ac_css where rid = ? and cid = ? +remove-string-column.n.cn = delete from cn_css where rid = ? and cid = ? +remove-string-column.n.lk = delete from lk_css where rid = ? and cid = ? +check-schema = select count(*) from css + +# base statement with paging ; table join ; where clause ; where clause for sort field (if needed) ; order by clause +find.n.au = select TR.rid, TR.cid, TR.v from (select a.rid, a.cid, a.v, ROW_NUMBER() OVER () AS R from au_css a {0} where {1} 1 = 1 {2}) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? 
and {0}.rid = a.rid ; order by {0}.v {1} +find.n.ac = select TR.rid, TR.cid, TR.v from (select a.rid, a.cid, a.v, ROW_NUMBER() OVER () AS R from ac_css a {0} where {1} 1 = 1 {2}) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.cn = select TR.rid, TR.cid, TR.v from (select a.rid, a.cid, a.v, ROW_NUMBER() OVER () AS R from cn_css a {0} where {1} 1 = 1 {2}) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.lk = select TR.rid, TR.cid, TR.v from (select a.rid, a.cid, a.v, ROW_NUMBER() OVER () AS R from lk_css a {0} where {1} 1 = 1 {2}) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +validate = values(1) +rowid-hash = SHA1 + + + +block-select-row = select b from css_b where rid = ? +block-delete-row = delete from css_b where rid = ? +block-insert-row = insert into css_b (rid,b) values (?, ?) +block-update-row = update css_b set b = ? where rid = ? +list-all = select rid, b from css_b +list-all-count = select count(*) from css_b + +block-select-row.n.au = select b from au_css_b where rid = ? +block-delete-row.n.au = delete from au_css_b where rid = ? +block-insert-row.n.au = insert into au_css_b (rid,b) values (?, ?) +block-update-row.n.au = update au_css_b set b = ? where rid = ? +list-all.n.au = select rid, b from au_css_b +list-all-count.n.au = select count(*) from au_css_b + +block-select-row.n.ac = select b from ac_css_b where rid = ? +block-delete-row.n.ac = delete from ac_css_b where rid = ? +block-insert-row.n.ac = insert into ac_css_b (rid,b) values (?, ?) +block-update-row.n.ac = update ac_css_b set b = ? where rid = ? 
+list-all.n.ac = select rid, b from ac_css_b +list-all-count.n.ac = select count(*) from ac_css_b + +block-select-row.n.cn = select b from cn_css_b where rid = ? +block-delete-row.n.cn = delete from cn_css_b where rid = ? +block-insert-row.n.cn = insert into cn_css_b (rid,b) values (?, ?) +block-update-row.n.cn = update cn_css_b set b = ? where rid = ? +list-all.n.cn = select rid, b from cn_css_b +list-all-count.n.cn = select count(*) from cn_css_b + +block-select-row.n.lk = select b from lk_css_b where rid = ? +block-delete-row.n.lk = delete from lk_css_b where rid = ? +block-insert-row.n.lk = insert into lk_css_b (rid,b) values (?, ?) +block-update-row.n.lk = update lk_css_b set b = ? where rid = ? +list-all.n.lk = select rid, b from lk_css_b +list-all-count.n.lk = select count(*) from lk_css_b + +# base statement with paging ; table join ; where clause ; where clause for sort field (if needed) ; order by clause; sort field column( if needed) +## the subselect in the paging statement is required by Derby to do paging. http://db.apache.org/derby/docs/10.6/ref/rreffuncrownumber.html +block-find = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2}) as s) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +block-find.n.au = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2}) as s) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? 
and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +block-find.n.ac = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2}) as s) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +block-find.n.cn = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2}) as s) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +block-find.n.lk = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select distinct a.rid {5} from lk_css a {0} where {1} 1 = 1 {2}) as s) as TR where TR.R > {4,number,#} and TR.R <= {3,number,#}+{4,number,#};, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v + +# Optimized queries to find children +listchildren = select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2};, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +listchildren.n.au = select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +listchildren.n.ac = select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +listchildren.n.cn = select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? 
and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +listchildren.n.lk = select distinct a.rid {5} from lk_css a {0} where {1} 1 = 1 {2};, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v + +# Optimized queries estimate the count of any query. +countestimate = select count(*) from (select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2}) as tocount;, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +countestimate.n.au = select count(*) from (select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2}) as tocount;, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +countestimate.n.ac = select count(*) from (select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2}) as tocount;, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +countestimate.n.cn = select count(*) from (select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2}) as tocount;, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v +countestimate.n.lk = select count(*) from (select distinct a.rid {5} from lk_css a {0} where {1} 1 = 1 {2}) as tocount;, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1};, {0}.v + + +use-batch-inserts = 0 + +# Queries that take longer than these times to execute will be logged with warn and error respectively. +# Logging is performed against org.sakaiproject.nakamura.lite.storage.spi.jdbc.JDBCStorageClient.SlowQueryLogger +slow-query-time = 50 +very-slow-query-time = 100 + +index-column-name-select = select cf, cid, cname from css_wr +index-column-name-insert = insert into css_wr ( cf, cid, cname ) values ( ? , ? , ? 
) +alter-widestring-table = ALTER TABLE {0}_css_w ADD {1} varchar(780) +index-widestring-table = CREATE INDEX {0}_css_w_{1} ON {0}_css_w ({1}) + +exists-widestring-row = select rid from css_w where rid = ? +exists-widestring-row.n.cn = select rid from cn_css_w where rid = ? +exists-widestring-row.n.ac = select rid from ac_css_w where rid = ? +exists-widestring-row.n.au = select rid from au_css_w where rid = ? +exists-widestring-row.n.lk = select rid from lk_css_w where rid = ? + + +delete-widestring-row = delete from css_w where rid = ? +delete-widestring-row.n.cn = delete from cn_css_w where rid = ? +delete-widestring-row.n.ac = delete from ac_css_w where rid = ? +delete-widestring-row.n.au = delete from au_css_w where rid = ? +delete-widestring-row.n.lk = delete from lk_css_w where rid = ? + +update-widestring-row = update css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.cn = update cn_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.ac = update ac_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.au = update au_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.lk = update lk_css_w set {0} where rid = ?; {0} = ? + + +insert-widestring-row = insert into css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.cn = insert into cn_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.ac = insert into ac_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.au = insert into au_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.lk = insert into lk_css_w ( rid {0} ) values ( ? {1} ) + + +## * Part 0 basic SQL template; {0} is the where clause {1} is the sort clause {2} is the from {3} is the to record +## * eg select rid from css where {0} {1} LIMIT {2} ROWS {3} +## * Part 1 where clause for non array matches; {0} is the columnName +## * eg {0} = ? 
+## * Part 2 where clause for array matches (not possible to sort on array matches) {0} is the table alias, {1} is the where clause +## * eg rid in ( select {0}.rid from css {0} where {1} ) +## * Part 3 the where clause for array matches {0} is the table alias +## * eg {0}.cid = ? and {0}.v = ? +## * Part 3 sort clause {0} is the list to sort by +## * eg sort by {0} +## * Part 4 sort elements, {0} is the column, {1} is the order +## * eg {0} {1} +## * Dont include , AND or OR, the code will add those as appropriate. +wide-block-find = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select a.rid from css_w a where {0} {1} ) as s) as TR where TR.R > {3,number,#} and TR.R <= {2,number,#}+{3,number,#};a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-block-find.n.cn = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select a.rid from cn_css_w a where {0} {1} ) as s) as TR where TR.R > {3,number,#} and TR.R <= {2,number,#}+{3,number,#};a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-block-find.n.ac = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select a.rid from ac_css_w a where {0} {1} ) as s) as TR where TR.R > {3,number,#} and TR.R <= {2,number,#}+{3,number,#};a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-block-find.n.au = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select a.rid from au_css_w a where {0} {1} ) as s) as TR where TR.R > {3,number,#} and TR.R <= {2,number,#}+{3,number,#};a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? 
and {0}.v = ?;sort by {0};{0} {1} +wide-block-find.n.lk = select TR.rid from (select s.rid, ROW_NUMBER() OVER () AS R from (select a.rid from lk_css_w a where {0} {1} ) as s) as TR where TR.R > {3,number,#} and TR.R <= {2,number,#}+{3,number,#};a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} + +wide-listchildren = select a.rid from css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-listchildren.n.cn = select a.rid from cn_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-listchildren.n.ac = select a.rid from ac_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-listchildren.n.au = select a.rid from au_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-listchildren.n.lk = select a.rid from lk_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} + +wide-countestimate = select count(*) from css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-countestimate.n.cn = select count(*) from cn_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-countestimate.n.ac = select count(*) from ac_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} +wide-countestimate.n.au = select count(*) from au_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? 
and {0}.v = ?;sort by {0};{0} {1} +wide-countestimate.n.lk = select count(*) from lk_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;sort by {0};{0} {1} + diff --git a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ddl b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ddl similarity index 100% rename from src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ddl rename to core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ddl diff --git a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.sql b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.sql similarity index 53% rename from src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.sql rename to core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.sql index 49bcbbc1..d0fc80ce 100644 --- a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.sql +++ b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.sql @@ -1,72 +1,84 @@ -# SQL statements of the form key[.keyspace.columnfamily.[rowID0-2]] -# the based key should always be present -# the keyspace.columnfamily selectors are used to shard the column family (optional) -# the rowID0-2 is to shard on rowID, you can selectively shard hot rowID areas. -# If sharding ensure that any exiting data is migrated (using SQL DML) and that the finder statements are adjusted to incorporate the shards (warning, might be hard) -# Indexer statements +# +# This SQL file is here for reference, and will only be used if a specific file for the DB type is not available. +# The finder SQL in this file is unlikely to function correctly for the database in question as paging is non standard +# for SQL. 
+# + delete-string-row = delete from css where rid = ? -delete-string-row.n.ac = delete from ac_css where rid = ? delete-string-row.n.au = delete from au_css where rid = ? +delete-string-row.n.ac = delete from ac_css where rid = ? delete-string-row.n.cn = delete from cn_css where rid = ? select-string-row = select cid, v from css where rid = ? -select-string-row.n.ac = select cid, v from ac_css where rid = ? select-string-row.n.au = select cid, v from au_css where rid = ? +select-string-row.n.ac = select cid, v from ac_css where rid = ? select-string-row.n.cn = select cid, v from cn_css where rid = ? insert-string-column = insert into css ( v, rid, cid) values ( ?, ?, ? ) -insert-string-column.n.ac = insert into ac_css ( v, rid, cid) values ( ?, ?, ? ) insert-string-column.n.au = insert into au_css ( v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.ac = insert into ac_css ( v, rid, cid) values ( ?, ?, ? ) insert-string-column.n.cn = insert into cn_css ( v, rid, cid) values ( ?, ?, ? ) update-string-column = update css set v = ? where rid = ? and cid = ? -update-string-column.n.ac = update ac_css set v = ? where rid = ? and cid = ? update-string-column.n.au = update au_css set v = ? where rid = ? and cid = ? +update-string-column.n.ac = update ac_css set v = ? where rid = ? and cid = ? update-string-column.n.cn = update cn_css set v = ? where rid = ? and cid = ? remove-string-column = delete from css where rid = ? and cid = ? -remove-string-column.n.ac = delete from ac_css where rid = ? and cid = ? remove-string-column.n.au = delete from au_css where rid = ? and cid = ? +remove-string-column.n.ac = delete from ac_css where rid = ? and cid = ? remove-string-column.n.cn = delete from cn_css where rid = ? and cid = ? -# Example of a sharded query, rowIDs starting with x will use this -### remove-string-column.n.cn._X = delete from cn_css_X where rid = ? and cid = ? -find.n.au = select a.rid, a.cid, a.v from au_css a {0} where {1} 1 = 1;, au_css {0} ; {0}.cid = ? 
and {0}.v = ? and {0}.rid = a.rid and +check-schema = select count(*) from css + +# 0: select +# 1: table join +# 2: where clause +# 3: where clause for sort field (if needed) +# 4: order by clause +find.n.au = select a.rid, a.cid, a.v from au_css a {0} where {1} 1 = 1 ;, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.ac = select a.rid, a.cid, a.v from ac_css a {0} where {1} 1 = 1 ;, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.cn = select a.rid, a.cid, a.v from cn_css a {0} where {1} 1 = 1 ;, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +validate = values(1) +rowid-hash = SHA1 + + select-index-columns = select cid from index_cols + block-select-row = select b from css_b where rid = ? block-delete-row = delete from css_b where rid = ? block-insert-row = insert into css_b (rid,b) values (?, ?) block-update-row = update css_b set b = ? where rid = ? +list-all = select rid, b from css_b + +block-select-row.n.au = select b from au_css_b where rid = ? +block-delete-row.n.au = delete from au_css_b where rid = ? +block-insert-row.n.au = insert into au_css_b (rid,b) values (?, ?) +block-update-row.n.au = update au_css_b set b = ? where rid = ? +list-all.n.au = select rid, b from au_css_b block-select-row.n.ac = select b from ac_css_b where rid = ? block-delete-row.n.ac = delete from ac_css_b where rid = ? block-insert-row.n.ac = insert into ac_css_b (rid,b) values (?, ?) block-update-row.n.ac = update ac_css_b set b = ? where rid = ? +list-all.n.ac = select rid, b from ac_css_b block-select-row.n.cn = select b from cn_css_b where rid = ? block-delete-row.n.cn = delete from cn_css_b where rid = ? block-insert-row.n.cn = insert into cn_css_b (rid,b) values (?, ?) block-update-row.n.cn = update cn_css_b set b = ? where rid = ? 
+list-all.n.cn = select rid, b from cn_css_b -block-select-row.n.au = select b from au_css_b where rid = ? -block-delete-row.n.au = delete from au_css_b where rid = ? -block-insert-row.n.au = insert into au_css_b (rid,b) values (?, ?) -block-update-row.n.au = update au_css_b set b = ? where rid = ? - -# -# These are finder statements -block-find = select a.rid, a.b from css_b a {0} where {1} 1 = 1;, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid and -block-find.n.au = select a.rid, a.b from au_css_b a {0} where {1} 1 = 1;, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid and -block-find.n.cn = select a.rid, a.b from cn_css_b a {0} where {1} 1 = 1;, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid and -block-find.n.ac = select a.rid, a.b from ac_css_b a {0} where {1} 1 = 1;, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid and +# 0: base statement +# 1: table join +# 2: where clause +# 3: where clause for sort field (if needed) +# 4: order by clause +block-find = select distinct a.rid from css a {0} where {1} 1 = 1;, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +block-find.n.au = select distinct a.rid from au_css a {0} where {1} 1 = 1;, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +block-find.n.ac = select distinct a.rid from ac_css a {0} where {1} 1 = 1;, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +block-find.n.cn = select distinct a.rid from cn_css a {0} where {1} 1 = 1;, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +use-batch-inserts = 0 -# statement to validate the connection -validate = select 1 - -# What type of rowID has should be used. Must be non colliding (reasonable probability), cant be changed once set without data migration. 
-# SHA-1 has a 1:10E14 probability of collision, so IMVHO is Ok here. Do not use MD5, it will collide. -rowid-hash = SHA1 - -# statement to check that the schema exists -check-schema = select count(*) from css +# Queries that take longer than these times to execute will be logged with warn and error respectively. +# Logging is performed against org.sakaiproject.nakamura.lite.storage.spi.jdbc.JDBCStorageClient.SlowQueryLogger +slow-query-time = 50 +very-slow-query-time = 100 -# Use batch Inserts means that update operations will be performed as batches rather than single SQL statements. This only really effects the update of -# Index tables and not the content store but it will reduce the number of SQL operations where more than one field is indexed per content item. -use-batch-inserts = 1 diff --git a/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/README.txt b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/README.txt new file mode 100644 index 00000000..8b05ebaa --- /dev/null +++ b/core/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/README.txt @@ -0,0 +1,3 @@ +In this folder you should put upgrade scripts. +They should be named by date and feature and contain the SQL statements to be run against a database to perform an upgrade on the database. +If its not possible to express the action in SQL, then express the action in words. 
\ No newline at end of file diff --git a/src/test/java/org/sakaiproject/nakamura/lite/storage/StorageClientUtilsTest.java b/core/src/test/java/org/sakaiproject/nakamura/api/lite/StorageClientUtilsTest.java similarity index 80% rename from src/test/java/org/sakaiproject/nakamura/lite/storage/StorageClientUtilsTest.java rename to core/src/test/java/org/sakaiproject/nakamura/api/lite/StorageClientUtilsTest.java index 2706044a..26563eda 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/storage/StorageClientUtilsTest.java +++ b/core/src/test/java/org/sakaiproject/nakamura/api/lite/StorageClientUtilsTest.java @@ -15,7 +15,7 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.storage; +package org.sakaiproject.nakamura.api.lite; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -26,7 +26,9 @@ import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import java.io.UnsupportedEncodingException; +import java.util.HashSet; import java.util.Map; +import java.util.Set; public class StorageClientUtilsTest { @@ -95,6 +97,8 @@ public void testGetParentObjectPath() { Assert.assertEquals("/", StorageClientUtils.getParentObjectPath("/test/")); Assert.assertEquals("/test", StorageClientUtils.getParentObjectPath("/test/ing")); Assert.assertEquals("/test", StorageClientUtils.getParentObjectPath("/test/ing/")); + Assert.assertEquals("http:/", StorageClientUtils.getParentObjectPath("http://localhost")); + Assert.assertEquals("http:", StorageClientUtils.getParentObjectPath("http://")); } @Test @@ -116,19 +120,25 @@ public void testHash() { public void testGetFilterMap() { Map t = ImmutableMap.of("a", (Object) "b", "c", "d", "y", "should have been removed"); Map modifications = ImmutableMap.of("a", (Object) "b", "x", "New", "y", new RemoveProperty() ); - Map m = StorageClientUtils.getFilterMap(t, 
modifications, null, ImmutableSet.of("c")); + Map m = StorageClientUtils.getFilterMap(t, modifications, null, ImmutableSet.of("c"), false); Assert.assertEquals(2, m.size()); Assert.assertEquals("b", m.get("a")); Assert.assertEquals("New", m.get("x")); Assert.assertFalse(m.containsKey("y")); + m = StorageClientUtils.getFilterMap(t, modifications, null, ImmutableSet.of("c"), true); + Assert.assertEquals(3, m.size()); + Assert.assertEquals("b", m.get("a")); + Assert.assertEquals("New", m.get("x")); + Assert.assertTrue(m.containsKey("y")); Map t2 = ImmutableMap.of("a", (Object) "b", "c", "d", "e", m); - Map m2 = StorageClientUtils.getFilterMap(t2, null, null, ImmutableSet.of("c")); + Map m2 = StorageClientUtils.getFilterMap(t2, null, null, ImmutableSet.of("c"), false); Assert.assertEquals(2, m2.size()); Assert.assertEquals("b", m2.get("a")); m = (Map) m2.get("e"); - Assert.assertEquals(2, m.size()); + Assert.assertEquals(3, m.size()); Assert.assertEquals("b", m.get("a")); Assert.assertEquals("New", m.get("x")); + Assert.assertTrue(m.containsKey("y")); } @@ -148,5 +158,36 @@ public void testGetFilteredAndEcodedMap() throws UnsupportedEncodingException { Assert.assertEquals(1, m.size()); Assert.assertEquals("b", m.get("a")); } + + + @Test + public void testEncode() { + Set check = new HashSet(); + byte[] b = new byte[1]; + for ( int i = 0; i < 100000; i++ ) { + b = incByteArray(b,0); + String id = StorageClientUtils.encode(b); + if ( check.contains(id) ) { + Assert.fail(id); + } + check.add(id); + } + } + + + private byte[] incByteArray(byte[] b, int i) { + if ( i == b.length) { + byte[] bn = new byte[b.length+1]; + System.arraycopy(b, 0, bn, 0, b.length); + bn[i] = 0x01; + b = bn; + } else { + b[i]++; + if (b[i] == 0) { + b = incByteArray(b, i + 1); + } + } + return b; + } } diff --git a/src/test/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModificationTest.java b/core/src/test/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModificationTest.java 
similarity index 100% rename from src/test/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModificationTest.java rename to core/src/test/java/org/sakaiproject/nakamura/api/lite/accesscontrol/AclModificationTest.java diff --git a/core/src/test/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableTest.java b/core/src/test/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableTest.java new file mode 100644 index 00000000..bf1b62d8 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/api/lite/authorizable/AuthorizableTest.java @@ -0,0 +1,168 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.api.lite.authorizable; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; + +public class AuthorizableTest { + + protected Authorizable u; + + @Before + public void setup() throws StorageClientException, AccessDeniedException{ + // u is an empty, non-anonymous, user at the beginning of each test. + u = new User(new HashMap()); + } + + // --- Init + + @Test + public void testInitEmpty(){ + assertEquals(1, u.getPrincipals().length); + // A non-anonymous user has the EVERYONE principal. + assertEquals(Group.EVERYONE, u.getPrincipals()[0]); + } + + @Test + public void testInitAnonymous() throws StorageClientException, AccessDeniedException { + Map props = new HashMap(); + props.put(Authorizable.ID_FIELD, User.ANON_USER); + Authorizable a = new User(props); + // The anonymous user has no principals. 
+ assertEquals(0, a.getPrincipals().length); + } + + @Test + public void testInitPrincipals() throws StorageClientException, AccessDeniedException { + Map props = new HashMap(); + props.put(Authorizable.PRINCIPALS_FIELD, "principal1-managers;principal2"); + Authorizable a = new User(props); + // principal1-managers, principal2, Group.EVERYONE + assertEquals(3, a.getPrincipals().length); + } + + // --- Properties, get, set, has, remove + + @Test + public void testGetProperty(){ + assertNull(u.getProperty("somekey")); + u.setProperty("somekey", "found"); + assertEquals("found", u.getProperty("somekey")); + } + + @Test + public void testGetPrivateProperty(){ + u.setProperty(Authorizable.PASSWORD_FIELD, "testpass"); + assertNull(u.getProperty(Authorizable.PASSWORD_FIELD)); + } + + @Test + public void testHasProperty(){ + assertFalse(u.hasProperty("anykey")); + u.setProperty("anykey", "where's the any key?"); + assertTrue(u.hasProperty("anykey")); + } + + @Test + public void testRemoveProperty(){ + u.setProperty("anykey", "where's the any key?"); + assertTrue(u.hasProperty("anykey")); + u.removeProperty("anykey"); + assertFalse(u.hasProperty("anykey")); + assertNull(u.getProperty("anykey")); + } + + @Test + public void testSetOverrideProperty(){ + u.setProperty("anykey", "value1"); + u.setProperty("anykey", "value2"); + assertEquals("value2", u.getProperty("anykey")); + } + + // --- Modified + + @Test + public void isModified(){ + u.setProperty("anykey", "value1"); + assertTrue(u.isModified()); + assertTrue(u.modifiedMap.size() > 0); + + u.reset((Map)new HashMap()); + assertFalse(u.isModified()); + + u.addPrincipal("first"); + assertTrue(u.isModified()); + } + + // --- Principals + + @Test + public void testAddPrincipal(){ + assertEquals(1, u.principals.size()); + u.addPrincipal("first"); + u.addPrincipal("second"); + assertEquals(3, u.principals.size()); + assertTrue(u.principalsModified); + } + + @Test + public void testRemovePrincipal(){ + assertEquals(1, 
u.principals.size()); + u.removePrincipal(Group.EVERYONE); + assertEquals(0, u.principals.size()); + assertTrue(u.principalsModified); + } + + // --- Reset + + @Test + public void testReset(){ + u.setProperty("anykey", "where's the any key?"); + + Map newProps = new HashMap(); + newProps.put("newkey", "No time for that the computer's starting!"); + // Reset the properties with new ones. + u.reset(newProps); + + // Old properties should no longer be present. + assertFalse(u.hasProperty("anykey")); + assertTrue(u.hasProperty("newkey")); + assertEquals(0, u.modifiedMap.size()); + } + + @Test + public void testResetEmpty(){ + u.setProperty("anykey", "where's the any key?"); + // Reset the properties with an empty Map. + u.reset(new HashMap()); + assertFalse(u.hasProperty("anykey")); + // No properties should be present. + assertEquals(0, u.modifiedMap.size()); + } +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/api/lite/util/EnablePeriodTest.java b/core/src/test/java/org/sakaiproject/nakamura/api/lite/util/EnablePeriodTest.java new file mode 100644 index 00000000..de8bfb6a --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/api/lite/util/EnablePeriodTest.java @@ -0,0 +1,86 @@ +package org.sakaiproject.nakamura.api.lite.util; + +import java.util.Calendar; +import java.util.TimeZone; + +import org.junit.Assert; +import org.junit.Test; + +public class EnablePeriodTest { + + @Test + public void testNone() { + Calendar[] none = new Calendar[]{null, null}; + Assert.assertArrayEquals(none, EnabledPeriod.getEnabledPeriod(null)); + Assert.assertArrayEquals(none, EnabledPeriod.getEnabledPeriod("sdfsdf")); + Assert.assertArrayEquals(none, EnabledPeriod.getEnabledPeriod(",sdffds")); + Assert.assertArrayEquals(none, EnabledPeriod.getEnabledPeriod(",sdffds,")); + Assert.assertArrayEquals(none, EnabledPeriod.getEnabledPeriod("sdffds,")); + + } + @Test + public void testFrom() { + Calendar[] from = new Calendar[]{new ISO8601Date("20110112"), null}; + 
Assert.assertArrayEquals(from, EnabledPeriod.getEnabledPeriod("2011-01-12,")); + + } + @Test + public void testTo() { + Calendar[] from = new Calendar[]{null, new ISO8601Date("20110112")}; + Assert.assertArrayEquals(from, EnabledPeriod.getEnabledPeriod(",2011-01-12")); + + } + @Test + public void testBetween() { + Calendar[] from = new Calendar[]{new ISO8601Date("20110124"), new ISO8601Date("20110128")}; + Assert.assertArrayEquals(from, EnabledPeriod.getEnabledPeriod("2011-01-24,2011-01-28")); + } + + @Test + public void testPeriod() { + // test this properly including a time zone differences that cross the date line. + long testTime = 1321998791404L; // 2011-11-23T08:53 AEST which makes it 2011-11-22T21:53:11Z + long testTimeEnd = 1322086391404L; // 2011-11-24T09:13 AEST which makes it 2011-11-23T22:13:11Z + Assert.assertNull(EnabledPeriod.getEnableValue(-1, -1, false, TimeZone.getTimeZone("GMT"))); + Assert.assertEquals("2011-11-22T21:53:11Z,",EnabledPeriod.getEnableValue(testTime, -1, false, TimeZone.getTimeZone("GMT"))); + Assert.assertEquals("2011-11-22,",EnabledPeriod.getEnableValue(testTime, -1, true, TimeZone.getTimeZone("GMT"))); + Assert.assertEquals("2011-11-23T08:53:11+11:00,",EnabledPeriod.getEnableValue(testTime, -1, false, TimeZone.getTimeZone("Australia/Sydney"))); + Assert.assertEquals("2011-11-23,",EnabledPeriod.getEnableValue(testTime, -1, true, TimeZone.getTimeZone("Australia/Sydney"))); + + Assert.assertEquals(",2011-11-23T22:13:11Z",EnabledPeriod.getEnableValue(-1, testTimeEnd, false, TimeZone.getTimeZone("GMT"))); + Assert.assertEquals(",2011-11-23",EnabledPeriod.getEnableValue( -1, testTimeEnd, true, TimeZone.getTimeZone("GMT"))); + Assert.assertEquals(",2011-11-24T09:13:11+11:00",EnabledPeriod.getEnableValue( -1,testTimeEnd, false, TimeZone.getTimeZone("Australia/Sydney"))); + Assert.assertEquals(",2011-11-24",EnabledPeriod.getEnableValue( -1, testTimeEnd, true, TimeZone.getTimeZone("Australia/Sydney"))); + + 
Assert.assertEquals("2011-11-22T21:53:11Z,2011-11-23T22:13:11Z",EnabledPeriod.getEnableValue(testTime, testTimeEnd, false, TimeZone.getTimeZone("GMT"))); + Assert.assertEquals("2011-11-22,2011-11-23",EnabledPeriod.getEnableValue( testTime, testTimeEnd, true, TimeZone.getTimeZone("GMT"))); + Assert.assertEquals("2011-11-23T08:53:11+11:00,2011-11-24T09:13:11+11:00",EnabledPeriod.getEnableValue( testTime, testTimeEnd, false, TimeZone.getTimeZone("Australia/Sydney"))); + Assert.assertEquals("2011-11-23,2011-11-24",EnabledPeriod.getEnableValue( testTime, testTimeEnd, true, TimeZone.getTimeZone("Australia/Sydney"))); + + } + + @Test + public void testPeriodEnable() { + long minus24h = System.currentTimeMillis()-48*60*60*1000; + long minus1h = System.currentTimeMillis()-60*1000; + long plus24h = System.currentTimeMillis()+48*60*60*1000; + Assert.assertTrue(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(minus1h, plus24h, false, TimeZone.getTimeZone("GMT")))); + Assert.assertTrue(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(minus24h, plus24h, true, TimeZone.getTimeZone("GMT")))); + Assert.assertTrue(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(-1, plus24h, true, TimeZone.getTimeZone("GMT")))); + Assert.assertTrue(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(minus24h, -1, true, TimeZone.getTimeZone("GMT")))); + Assert.assertTrue(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(-1, -1, true, TimeZone.getTimeZone("GMT")))); + + + Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(0, 10, false, TimeZone.getTimeZone("GMT")))); + Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(plus24h, minus24h, false, TimeZone.getTimeZone("GMT")))); + Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(-1, minus1h, false, TimeZone.getTimeZone("GMT")))); + 
Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(plus24h, -1, false, TimeZone.getTimeZone("GMT")))); + + Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(0, 10, true, TimeZone.getTimeZone("GMT")))); + Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(plus24h, minus24h, true, TimeZone.getTimeZone("GMT")))); + Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(-1, minus24h, true, TimeZone.getTimeZone("GMT")))); + Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(plus24h, -1, true, TimeZone.getTimeZone("GMT")))); + Assert.assertFalse(EnabledPeriod.isInEnabledPeriod(EnabledPeriod.getEnableValue(-1, minus24h, true, TimeZone.getTimeZone("GMT")))); + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/api/lite/util/ISO8601DateTest.java b/core/src/test/java/org/sakaiproject/nakamura/api/lite/util/ISO8601DateTest.java new file mode 100644 index 00000000..1307ce30 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/api/lite/util/ISO8601DateTest.java @@ -0,0 +1,96 @@ +package org.sakaiproject.nakamura.api.lite.util; + +import org.junit.Assert; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.util.ISO8601Date; + +import java.util.Calendar; +import java.util.SimpleTimeZone; +import java.util.TimeZone; + + +/** + * + */ +public class ISO8601DateTest { + + private static final long PERIOD = 24L*24L*60L*60L*1000L+2000L; + + @Test + public void testParseUTC() { + long t = 0; + // london may not be correct + TimeZone london = new SimpleTimeZone(0, + "Europe/London", + Calendar.MARCH, -1, Calendar.SUNDAY, + 3600000, SimpleTimeZone.UTC_TIME, + Calendar.OCTOBER, -1, Calendar.SUNDAY, + 3600000, SimpleTimeZone.UTC_TIME, + 3600000); + TimeZone paris = new SimpleTimeZone(3600000, + "Europe/Paris", + Calendar.MARCH, -1, Calendar.SUNDAY, + 3600000, SimpleTimeZone.UTC_TIME, + Calendar.OCTOBER, -1, 
Calendar.SUNDAY, + 3600000, SimpleTimeZone.UTC_TIME, + 3600000); + TimeZone la = new SimpleTimeZone(-28800000, + "America/Los_Angeles", + Calendar.APRIL, 1, -Calendar.SUNDAY, + 7200000, + Calendar.OCTOBER, -1, Calendar.SUNDAY, + 7200000, + 3600000); + for (int i = 0; i < 1000; i++ ) { + t += PERIOD; + ISO8601Date g = new ISO8601Date(); + g.setTimeZone(london); + g.setTimeInMillis(t); + String tt = g.toString(); + ISO8601Date g2 = new ISO8601Date(tt); + Assert.assertEquals(g.getTimeInMillis(), g2.getTimeInMillis()); + Assert.assertEquals(g.toString(), g2.toString()); + } + for (int i = 0; i < 1000; i++ ) { + t += PERIOD; + ISO8601Date g = new ISO8601Date(); + g.setTimeZone(paris); + g.setTimeInMillis(t); + String tt = g.toString(); + ISO8601Date g2 = new ISO8601Date(tt); + Assert.assertEquals(g.getTimeInMillis(), g2.getTimeInMillis()); + Assert.assertEquals(g.toString(), g2.toString()); + } + for (int i = 0; i < 1000; i++ ) { + t += PERIOD; + ISO8601Date g = new ISO8601Date(); + g.setTimeZone(la); + g.setTimeInMillis(t); + String tt = g.toString(); + ISO8601Date g2 = new ISO8601Date(tt); + Assert.assertEquals(g.getTimeInMillis(), g2.getTimeInMillis()); + Assert.assertEquals(g.toString(), g2.toString()); + } + } + + @Test + public void testDate() { + ISO8601Date g = new ISO8601Date(); + g.set(2010, 11, 24); + g.setDate(true); + Assert.assertEquals("2010-12-24", g.toString()); + } + + @Test + public void testBefore() { + ISO8601Date g = new ISO8601Date(); + g.set(2011,11,23); + Calendar c = Calendar.getInstance(); + c.set(2011, 11, 20); + Assert.assertTrue(g.compareTo(c)>0); + g.setDate(true); + Assert.assertTrue(g.compareTo(c)>0); + + + } +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/CheckRowHash.java b/core/src/test/java/org/sakaiproject/nakamura/lite/CheckRowHash.java new file mode 100644 index 00000000..add93dbb --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/CheckRowHash.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Sakai 
Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +public class CheckRowHash { + + private static final String USAGE = "Generate a RowHash\n" + + "java "+CheckRowHash.class.getName()+" [ properties = ImmutableMap.of(ConfigurationImpl.INDEX_COLUMN_NAMES, (Object)ConfigurationImpl.DEFAULT_INDEX_COLUMN_NAMES); + configurationImpl.activate(properties); + Assert.assertEquals("n",configurationImpl.getKeySpace()); + } + @Test + public void testPropertiesOSGiOverride() throws IOException { + InternalContentAccess.resetInternalContent(); + ConfigurationImpl configurationImpl = new ConfigurationImpl(); + Map properties = ImmutableMap.of(ConfigurationImpl.INDEX_COLUMN_NAMES,(Object)"somethingElse"); + configurationImpl.activate(properties); + Assert.assertArrayEquals(new String[]{"somethingElse"}, configurationImpl.getIndexColumnNames()); + } + @Test + public void testPropertiesSharedOverride() throws 
IOException { + InternalContentAccess.resetInternalContent(); + ConfigurationImpl configurationImpl = new ConfigurationImpl(); + System.setProperty(ConfigurationImpl.SHAREDCONFIGPROPERTY, "src/test/resources/testsharedoverride.properties"); + Map properties = ImmutableMap.of(ConfigurationImpl.INDEX_COLUMN_NAMES, (Object)ConfigurationImpl.DEFAULT_INDEX_COLUMN_NAMES); + configurationImpl.activate(properties); + System.clearProperty(ConfigurationImpl.SHAREDCONFIGPROPERTY); + Assert.assertArrayEquals(new String[]{"somethingElseFromProperties"}, configurationImpl.getIndexColumnNames()); + } + @Test + public void testPropertiesSharedOverrideOSGi() throws IOException { + InternalContentAccess.resetInternalContent(); + ConfigurationImpl configurationImpl = new ConfigurationImpl(); + System.setProperty(ConfigurationImpl.SHAREDCONFIGPROPERTY, "src/test/resources/testsharedoverride.properties"); + Map properties = ImmutableMap.of(ConfigurationImpl.INDEX_COLUMN_NAMES,(Object)"somethingElse"); + configurationImpl.activate(properties); + System.clearProperty(ConfigurationImpl.SHAREDCONFIGPROPERTY); + Assert.assertArrayEquals(new String[]{"somethingElse"}, configurationImpl.getIndexColumnNames()); + } +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/DummyStorageCacheManager.java b/core/src/test/java/org/sakaiproject/nakamura/lite/DummyStorageCacheManager.java new file mode 100644 index 00000000..7ef14fab --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/DummyStorageCacheManager.java @@ -0,0 +1,33 @@ +package org.sakaiproject.nakamura.lite; + +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.StorageCacheManager; +import org.sakaiproject.nakamura.lite.storage.spi.ConcurrentLRUMap; + +public class DummyStorageCacheManager implements StorageCacheManager { + + private Map cache = new ConcurrentLRUMap(); + + @Override + public Map getAccessControlCache() { + return cache; + } 
+ + @Override + public Map getAuthorizableCache() { + return cache; + } + + @Override + public Map getContentCache() { + return cache; + } + + @Override + public Map getCache(String cacheName) { + return cache; + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/OSGiStoreListenerTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/OSGiStoreListenerTest.java new file mode 100644 index 00000000..d6a18350 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/OSGiStoreListenerTest.java @@ -0,0 +1,61 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite; + +import com.google.common.collect.ImmutableMap; + +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.osgi.service.event.EventAdmin; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; + +import java.util.Map; + +public class OSGiStoreListenerTest { + + @Mock + private EventAdmin eventAdmin; + + public OSGiStoreListenerTest() { + MockitoAnnotations.initMocks(this); + } + + @Test + public void test() { + OSGiStoreListener l = new OSGiStoreListener(); + l.eventAdmin = eventAdmin; + Map testMap = ImmutableMap.of("test", (Object) new String[]{"an", "array"}); + for (String zone : new String[] { Security.ADMIN_AUTHORIZABLES, Security.ADMIN_GROUPS, + Security.ADMIN_USERS, Security.ZONE_ADMIN, Security.ZONE_AUTHORIZABLES, + Security.ZONE_CONTENT }) { + l.onDelete(zone, "path", "user", "x", null); + l.onDelete(zone, "path", "user", null, testMap, (String[]) null); + l.onDelete(zone, "path", "user", "x", null, "xx"); + l.onDelete(zone, "path", "user", null, testMap, "x:x"); + l.onDelete(zone, null, "user", "x", null, "x:x", "x:x"); + l.onUpdate(zone, "path", "user", null, true, null); + l.onUpdate(zone, "path", "user", "x", false, testMap, (String[]) null); + l.onUpdate(zone, "path", "user", null, true, null, "xx"); + l.onUpdate(zone, "path", "user", "x", false, testMap, "x:x"); + l.onUpdate(zone, null, "user", null, true, null, "x:x", "x:x"); + } + l.onLogin("userId", "sessionId"); + l.onLogout("userId", "sessoionID"); + } +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/RepositoryImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/RepositoryImplTest.java similarity index 88% rename from src/test/java/org/sakaiproject/nakamura/lite/RepositoryImplTest.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/RepositoryImplTest.java index 0ca94b2f..5537ebc2 100644 --- 
a/src/test/java/org/sakaiproject/nakamura/lite/RepositoryImplTest.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/RepositoryImplTest.java @@ -25,19 +25,21 @@ import org.junit.Before; import org.junit.Test; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.Session; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; import org.sakaiproject.nakamura.api.lite.authorizable.AuthorizableManager; import org.sakaiproject.nakamura.api.lite.authorizable.User; import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; import java.util.Map; public class RepositoryImplTest { @@ -49,9 +51,7 @@ public class RepositoryImplTest { @Before public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, - ClassNotFoundException { - clientPool = getClientPool(); - client = clientPool.getClient(); + ClassNotFoundException, IOException { configuration = new ConfigurationImpl(); Map properties = Maps.newHashMap(); properties.put("keyspace", "n"); @@ -59,6 +59,8 @@ public void before() throws StorageClientException, AccessDeniedException, Clien properties.put("authorizable-column-family", "au"); properties.put("content-column-family", "cn"); 
configuration.activate(properties); + clientPool = getClientPool(configuration); + client = clientPool.getClient(); AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, configuration); authorizableActivator.setup(); @@ -104,10 +106,11 @@ public void testStart() throws ClientPoolException, StorageClientException, } - protected StorageClientPool getClientPool() throws ClassNotFoundException { + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { MemoryStorageClientPool cp = new MemoryStorageClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/Type1UUIDTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/Type1UUIDTest.java new file mode 100644 index 00000000..2f41fa67 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/Type1UUIDTest.java @@ -0,0 +1,72 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite; + +import junit.framework.Assert; + +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public class Type1UUIDTest { + + + protected static final Logger LOGGER = LoggerFactory.getLogger(Type1UUIDTest.class); + protected static final int TEST_SIZE = 10000; + private static final int N_THREADS = 4; + protected Map check = new ConcurrentHashMap(TEST_SIZE*N_THREADS); + private int errors; + + @Test + public void testType1UUID() { + errors = 0; + Thread[] t = new Thread[N_THREADS]; + for ( int i = 0; i < t.length; i++ ) { + t[i] = new Thread(new Runnable() { + + + public void run() { + String id = null; + for ( int i = 0; i < TEST_SIZE; i++ ) { + id = StorageClientUtils.getUuid(); + if ( check.containsKey(id)) { + LOGGER.error("Collision {} ago {} ",id, System.currentTimeMillis()-check.get(id)); + errors++; + } + check.put(id,System.currentTimeMillis()); + } + LOGGER.info("Completed {} last ID is {} ",TEST_SIZE, id); + }; + }); + t[i].start(); + } + for ( int i = 0; i < t.length; i++ ) { + try { + t[i].join(); + } catch (InterruptedException e) { + LOGGER.error(e.getMessage(),e); + } + } + Assert.assertEquals("Collided "+errors+" times out of "+(TEST_SIZE*N_THREADS)+" ie about "+((100*errors)/(TEST_SIZE*N_THREADS))+"%, ",0, errors); + + } +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/accesscontrol/AbstractAccessControlManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/accesscontrol/AbstractAccessControlManagerImplTest.java new file mode 100644 index 00000000..322a5347 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/accesscontrol/AbstractAccessControlManagerImplTest.java @@ -0,0 +1,693 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.accesscontrol; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Ordering; +import com.google.common.collect.Sets; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessControlManager; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification.Operation; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permission; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; +import 
org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalTokenResolver; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorPlugin; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.authorizable.AuthorizableManager; +import org.sakaiproject.nakamura.api.lite.authorizable.Group; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.LoggingStorageListener; +import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; +import org.sakaiproject.nakamura.lite.authorizable.AuthorizableManagerImpl; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import org.sakaiproject.nakamura.lite.BaseMemoryRepository; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +public abstract class AbstractAccessControlManagerImplTest { + private static final Logger LOGGER = LoggerFactory + .getLogger(AbstractAccessControlManagerImplTest.class); + private StorageClient client; + private ConfigurationImpl configuration; + private StorageClientPool clientPool; + private PrincipalValidatorResolver principalValidatorResolver = new PrincipalValidatorResolverImpl(); + + @Before + public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, + ClassNotFoundException, IOException { + configuration = new ConfigurationImpl(); + Map properties = 
Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", Security.ZONE_AUTHORIZABLES); + configuration.activate(properties); + clientPool = getClientPool(configuration); + client = clientPool.getClient(); + AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, + configuration); + authorizableActivator.setup(); + LOGGER.info("Setup Complete"); + } + + protected abstract StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException; + + @After + public void after() throws ClientPoolException { + client.close(); + } + + @Test + public void test() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl authenticator = new AuthenticatorImpl(client, configuration, null); + User currentUser = authenticator.authenticate("admin", "admin"); + String u1 = "user1-" + System.currentTimeMillis(); + String u2 = "user2-" + System.currentTimeMillis(); + String u3 = "user3-" + System.currentTimeMillis(); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + AclModification user1 = new AclModification(AclModification.grantKey(u1), + Permissions.CAN_ANYTHING.combine(Permissions.CAN_ANYTHING_ACL).getPermission(), + AclModification.Operation.OP_REPLACE); + AclModification user2 = new AclModification(AclModification.grantKey(u2), + Permissions.CAN_READ.combine(Permissions.CAN_WRITE).combine(Permissions.CAN_DELETE) + .getPermission(), AclModification.Operation.OP_REPLACE); + AclModification user3 = new AclModification(AclModification.grantKey(u3), + Permissions.CAN_READ.getPermission(), AclModification.Operation.OP_REPLACE); + String basepath = "testpath" + System.currentTimeMillis(); + + accessControlManagerImpl.setAcl(Security.ZONE_AUTHORIZABLES, basepath, + new AclModification[] { user1, 
user2, user3 }); + + Map acl = accessControlManagerImpl.getAcl(Security.ZONE_AUTHORIZABLES, + basepath); + Assert.assertEquals(Integer.toHexString(Permissions.CAN_ANYTHING.combine( + Permissions.CAN_ANYTHING_ACL).getPermission()), Integer.toHexString((Integer) acl + .get(AclModification.grantKey(u1)))); + Assert.assertEquals( + Permissions.CAN_READ.combine(Permissions.CAN_WRITE).combine(Permissions.CAN_DELETE) + .getPermission(), + ((Integer) acl.get(AclModification.grantKey(u2))).intValue()); + Assert.assertEquals(Permissions.CAN_READ.getPermission(), + ((Integer) acl.get(AclModification.grantKey(u3))).intValue()); + for (Entry e : acl.entrySet()) { + LOGGER.info(" ACE {} : {} ", e.getKey(), e.getValue()); + } + LOGGER.info("Got ACL {}", acl); + + } + + @Test + public void testKern1515() throws Exception { + AuthenticatorImpl authenticator = new AuthenticatorImpl(client, configuration, null); + User currentUser = authenticator.authenticate("admin", "admin"); + String u3 = "user3-" + System.currentTimeMillis(); + String basepath = "testpath" + System.currentTimeMillis(); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, null, new LoggingStorageListener()); + authorizableManager.createUser(u3, "User 3", "test", + ImmutableMap.of("test", (Object) "test")); + + AclModification user3canRead = new AclModification(AclModification.grantKey(u3), + Permissions.CAN_READ.getPermission(), AclModification.Operation.OP_OR); + + AclModification user3canWrite = new AclModification(AclModification.grantKey(u3), + Permissions.CAN_WRITE.getPermission(), AclModification.Operation.OP_OR); + + AclModification user3cannotWrite = new AclModification(AclModification.denyKey(u3), + 
Permissions.CAN_WRITE.getPermission(), AclModification.Operation.OP_OR); + + accessControlManagerImpl.setAcl(Security.ZONE_CONTENT, basepath + "/zach", + new AclModification[] { user3canRead, user3canWrite }); + + accessControlManagerImpl.setAcl(Security.ZONE_CONTENT, basepath + "/zach", + new AclModification[] { user3cannotWrite }); + + Assert.assertFalse("User should not be able to write.", accessControlManagerImpl.can( + authorizableManager.findAuthorizable(u3), Security.ZONE_CONTENT, + basepath + "/zach", Permissions.CAN_WRITE)); + Assert.assertTrue("User should be able to read.", accessControlManagerImpl.can( + authorizableManager.findAuthorizable(u3), Security.ZONE_CONTENT, + basepath + "/zach", Permissions.CAN_READ)); + } + + @Test + public void testPrivileges() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl authenticator = new AuthenticatorImpl(client, configuration, null); + User currentUser = authenticator.authenticate("admin", "admin"); + String u1 = "user1-" + System.currentTimeMillis(); + String u2 = "user2-" + System.currentTimeMillis(); + String u3 = "user3-" + System.currentTimeMillis(); + String basepath = "testpath" + System.currentTimeMillis(); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + AclModification user1CanAnything = new AclModification(AclModification.grantKey(u1), + Permissions.CAN_ANYTHING.combine(Permissions.CAN_ANYTHING_ACL).getPermission(), + AclModification.Operation.OP_REPLACE); + AclModification user2CantReadWrite = new AclModification(AclModification.denyKey(u2), + Permissions.CAN_READ.combine(Permissions.CAN_WRITE).combine(Permissions.CAN_DELETE) + .getPermission(), AclModification.Operation.OP_REPLACE); + AclModification user3cantRead = new AclModification(AclModification.denyKey(u3), + Permissions.CAN_READ.getPermission(), 
AclModification.Operation.OP_REPLACE); + + AclModification denyAnon = new AclModification(AclModification.denyKey(User.ANON_USER), + Permissions.ALL.getPermission(), AclModification.Operation.OP_REPLACE); + AclModification denyEveryone = new AclModification(AclModification.denyKey(Group.EVERYONE), + Permissions.ALL.getPermission(), AclModification.Operation.OP_REPLACE); + + AclModification user2CanReadWrite = new AclModification(AclModification.grantKey(u2), + Permissions.CAN_READ.combine(Permissions.CAN_WRITE).combine(Permissions.CAN_DELETE) + .getPermission(), AclModification.Operation.OP_REPLACE); + AclModification user3canRead = new AclModification(AclModification.grantKey(u3), + Permissions.CAN_READ.getPermission(), AclModification.Operation.OP_REPLACE); + + accessControlManagerImpl.setAcl(Security.ZONE_CONTENT, basepath + "/a/b/c", + new AclModification[] { user1CanAnything, user2CantReadWrite, user3cantRead, + denyAnon, denyEveryone }); + accessControlManagerImpl.setAcl(Security.ZONE_CONTENT, basepath + "/a/b", + new AclModification[] { user1CanAnything, user2CanReadWrite }); + accessControlManagerImpl.setAcl(Security.ZONE_CONTENT, basepath + "/a", + new AclModification[] { user1CanAnything, user3canRead }); + + Map acl = accessControlManagerImpl.getAcl(Security.ZONE_CONTENT, basepath); + Assert.assertArrayEquals(new String[] {}, acl.keySet().toArray()); + + acl = accessControlManagerImpl.getAcl(Security.ZONE_CONTENT, basepath + "/a"); + acl = StorageClientUtils.getFilterMap(acl, null, null, + ImmutableSet.of("_aclKey", "_aclPath", "_aclType"), false); + Assert.assertArrayEquals(Arrays.toString(sortToArray(acl.keySet())), new String[] { + AclModification.grantKey(u1), AclModification.grantKey(u3) }, + sortToArray(acl.keySet())); + acl = accessControlManagerImpl.getAcl(Security.ZONE_CONTENT, basepath + "/a/b"); + acl = StorageClientUtils.getFilterMap(acl, null, null, + ImmutableSet.of("_aclKey", "_aclPath", "_aclType"), false); + Assert.assertArrayEquals( + 
new String[] { AclModification.grantKey(u1), AclModification.grantKey(u2) }, + sortToArray(acl.keySet())); + acl = accessControlManagerImpl.getAcl(Security.ZONE_CONTENT, basepath + "/a/b/c"); + acl = StorageClientUtils.getFilterMap(acl, null, null, + ImmutableSet.of("_aclKey", "_aclPath", "_aclType"), false); + Assert.assertArrayEquals(new String[] { AclModification.denyKey(User.ANON_USER), + AclModification.denyKey(Group.EVERYONE), AclModification.grantKey(u1), + AclModification.denyKey(u2), AclModification.denyKey(u3) }, + sortToArray(acl.keySet())); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, null, new LoggingStorageListener()); + authorizableManager.createUser(u1, "User 1", "test", + ImmutableMap.of("test", (Object) "test")); + authorizableManager.createUser(u2, "User 2", "test", + ImmutableMap.of("test", (Object) "test")); + authorizableManager.createUser(u3, "User 3", "test", + ImmutableMap.of("test", (Object) "test")); + + User user1 = (User) authorizableManager.findAuthorizable(u1); + User user2 = (User) authorizableManager.findAuthorizable(u2); + User user3 = (User) authorizableManager.findAuthorizable(u3); + User adminUser = (User) authorizableManager.findAuthorizable(User.ADMIN_USER); + User anonUser = (User) authorizableManager.findAuthorizable(User.ANON_USER); + Group everyoneGroup = (Group) authorizableManager.findAuthorizable(Group.EVERYONE); + + Assert.assertNotNull(user1); + Assert.assertNotNull(user2); + Assert.assertNotNull(user3); + Assert.assertNotNull(adminUser); + Assert.assertNotNull(anonUser); + Assert.assertNotNull(everyoneGroup); + + Assert.assertTrue(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath, + Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath, + Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(adminUser, Security.ZONE_CONTENT, 
basepath, + Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath, + Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath, + Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath, + Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath, + Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath, + Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath, + Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, + basepath, Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, + basepath, Permissions.ALL)); + + Assert.assertTrue(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath + + "/a/b/c", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(adminUser, Security.ZONE_CONTENT, basepath + + "/a/b/c", Permissions.ALL)); + Assert.assertFalse(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath + + "/a/b/c", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath + + "/a/b/c", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, + basepath + "/a/b/c", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath + + "/a/b/c", Permissions.CAN_READ)); + + Assert.assertTrue(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath + + "/a/b", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(adminUser, Security.ZONE_CONTENT, basepath + + "/a/b", Permissions.ALL)); + 
Assert.assertTrue(accessControlManagerImpl + .can(user2, Security.ZONE_CONTENT, basepath + "/a/b", Permissions.CAN_WRITE + .combine(Permissions.CAN_READ).combine(Permissions.CAN_DELETE))); + Assert.assertFalse(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath + + "/a/b", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath + + "/a/b", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath + + "/a/b", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath + + "/a/b", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath + + "/a/b", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, + basepath + "/a/b", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, + basepath + "/a/b", Permissions.ALL)); + + Assert.assertTrue(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath + + "/a", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(adminUser, Security.ZONE_CONTENT, basepath + + "/a", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath + + "/a", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath + + "/a", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath + + "/a", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath + + "/a", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath + + "/a", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath 
+ + "/a", Permissions.ALL)); + Assert.assertTrue(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, + basepath + "/a", Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, + basepath + "/a", Permissions.ALL)); + + String[] testpaths = { basepath, basepath + "/a", basepath + "/a/b", basepath + "/a/b/c", }; + + checkPermissions(user1, testpaths, new Permission[][] { + { Permissions.CAN_READ }, + { Permissions.CAN_READ, Permissions.CAN_WRITE, Permissions.CAN_DELETE, + Permissions.CAN_READ_ACL, Permissions.CAN_WRITE_ACL, + Permissions.CAN_DELETE_ACL, Permissions.CAN_MANAGE, Permissions.ALL }, + { Permissions.CAN_READ, Permissions.CAN_WRITE, Permissions.CAN_DELETE, + Permissions.CAN_READ_ACL, Permissions.CAN_WRITE_ACL, + Permissions.CAN_DELETE_ACL, Permissions.CAN_MANAGE, Permissions.ALL }, + { Permissions.CAN_READ, Permissions.CAN_WRITE, Permissions.CAN_DELETE, + Permissions.CAN_READ_ACL, Permissions.CAN_WRITE_ACL, + Permissions.CAN_DELETE_ACL, Permissions.CAN_MANAGE, Permissions.ALL } }, + new String[][] { { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE }, + { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u3 }, + { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u2, u3 }, + { User.ADMIN_USER, u1 } }, new String[][] { {}, {}, {}, + { User.ANON_USER, Group.EVERYONE, u2, u3 } }); + checkPermissions(user2, testpaths, new Permission[][] { { Permissions.CAN_READ }, + { Permissions.CAN_READ }, + { Permissions.CAN_READ, Permissions.CAN_WRITE, Permissions.CAN_DELETE }, {} }, + new String[][] { { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE }, + { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u3 }, + { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u2, u3 }, + { User.ADMIN_USER, u1 } }, new String[][] { {}, {}, {}, + { User.ANON_USER, Group.EVERYONE, u2, u3 } }); + checkPermissions(user3, testpaths, new Permission[][] { { Permissions.CAN_READ }, + { Permissions.CAN_READ }, { 
Permissions.CAN_READ }, {} }, new String[][] { + { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE }, + { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u3 }, + { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u2, u3 }, + { User.ADMIN_USER, u1 } }, new String[][] { {}, {}, {}, + { User.ANON_USER, Group.EVERYONE, u2, u3 } }); + + } + + private void checkPermissions(User u, String[] testPath, Object[][] expectedPermissions, + String[][] readers, String[][] deniedReaders) throws StorageClientException { + AccessControlManagerImpl acmU = new AccessControlManagerImpl(client, u, configuration, + null, new LoggingStorageListener(), principalValidatorResolver); + + for (int i = 0; i < testPath.length; i++) { + Permission[] p = acmU.getPermissions(Security.ZONE_CONTENT, testPath[i]); + LOGGER.info("Got {} {} {} ", + new Object[] { u.getId(), testPath[i], Arrays.toString(p) }); + Assert.assertArrayEquals(expectedPermissions[i], p); + String[] r = acmU.findPrincipals(Security.ZONE_CONTENT, testPath[i], + Permissions.CAN_READ.getPermission(), true); + Assert.assertArrayEquals(readers[i], sortToArray(ImmutableSet.copyOf(r))); + r = acmU.findPrincipals(Security.ZONE_CONTENT, testPath[i], + Permissions.CAN_READ.getPermission(), false); + Assert.assertArrayEquals(deniedReaders[i], sortToArray(ImmutableSet.copyOf(r))); + } + + } + + private String[] sortToArray(Set keySet) { + return Ordering.natural().sortedCopy(keySet).toArray(new String[keySet.size()]); + } + + @Test + public void testTokenPermission() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl authenticator = new AuthenticatorImpl(client, configuration, null); + User currentUser = authenticator.authenticate("admin", "admin"); + String u3 = "user3-" + System.currentTimeMillis(); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + 
AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, null, new LoggingStorageListener()); + authorizableManager.createUser(u3, "User 3", "test", + ImmutableMap.of("test", (Object) "test")); + Authorizable user3Auth = authorizableManager.findAuthorizable(u3); + String targetContentPath = "targetContentPath" + System.currentTimeMillis(); + int grantedBitmap = Permissions.CAN_WRITE.getPermission(); + int deniedBitmap = Permissions.CAN_MANAGE.getPermission(); + String aclID = Integer.toHexString(grantedBitmap) + "_" + Integer.toHexString(deniedBitmap); + String tokenPrincipal = AccessControlManager.DYNAMIC_PRINCIPAL_STEM + aclID; + + // grant access to the token, but deny access to everyone else. + accessControlManagerImpl.setAcl( + Security.ZONE_CONTENT, + targetContentPath, + new AclModification[] { + new AclModification(AclModification.denyKey(tokenPrincipal), deniedBitmap, + Operation.OP_REPLACE), + new AclModification(AclModification.grantKey(tokenPrincipal), + grantedBitmap, Operation.OP_REPLACE), + new AclModification(AclModification.denyKey(Group.EVERYONE), + Permissions.CAN_READ.getPermission(), Operation.OP_REPLACE), + new AclModification(AclModification.denyKey(User.ANON_USER), + Permissions.CAN_READ.getPermission(), Operation.OP_REPLACE) }); + // the tokens should not be setup, + final Content tokentContent = new Content("testtoken/" + tokenPrincipal, null); + accessControlManagerImpl.signContentToken(tokentContent, Security.ZONE_CONTENT, targetContentPath); + LOGGER.info("Checking Token {} ", tokentContent); + accessControlManagerImpl.setRequestPrincipalResolver(new PrincipalTokenResolver() { + public List resolveTokens(String principal) { + List tokens = Lists.newArrayList(); + tokens.add(tokentContent); + LOGGER.info("Principal {} checked tokens {}", principal, tokens); + return tokens; + } + }); + Assert.assertTrue(accessControlManagerImpl.can(user3Auth, 
Security.ZONE_CONTENT, + targetContentPath, Permissions.CAN_WRITE)); + Assert.assertFalse(accessControlManagerImpl.can(user3Auth, Security.ZONE_CONTENT, + targetContentPath, Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user3Auth, Security.ZONE_CONTENT, + targetContentPath, Permissions.CAN_MANAGE)); + + accessControlManagerImpl.clearRequestPrincipalResolver(); + LOGGER.info("Done Checking token"); + } + + @Test + public void testTokenPermissionWithPlugin() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl authenticator = new AuthenticatorImpl(client, configuration, null); + User currentUser = authenticator.authenticate("admin", "admin"); + String u3 = "user3-" + System.currentTimeMillis(); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, null, new LoggingStorageListener()); + authorizableManager.createUser(u3, "User 3", "test", + ImmutableMap.of("test", (Object) "test")); + Authorizable user3Auth = authorizableManager.findAuthorizable(u3); + String targetContentPath = "targetContentPath" + System.currentTimeMillis(); + int grantedBitmap = Permissions.CAN_WRITE.getPermission(); + int deniedBitmap = Permissions.CAN_MANAGE.getPermission(); + String aclID = Integer.toHexString(grantedBitmap) + "_" + Integer.toHexString(deniedBitmap); + String tokenPrincipal = AccessControlManager.DYNAMIC_PRINCIPAL_STEM + aclID; + + // grant access to the token, but deny access to everyone else. 
+ accessControlManagerImpl.setAcl( + Security.ZONE_CONTENT, + targetContentPath, + new AclModification[] { + new AclModification(AclModification.denyKey(tokenPrincipal), deniedBitmap, + Operation.OP_REPLACE), + new AclModification(AclModification.grantKey(tokenPrincipal), + grantedBitmap, Operation.OP_REPLACE), + new AclModification(AclModification.denyKey(Group.EVERYONE), + Permissions.CAN_READ.getPermission(), Operation.OP_REPLACE), + new AclModification(AclModification.denyKey(User.ANON_USER), + Permissions.CAN_READ.getPermission(), Operation.OP_REPLACE) }); + // the tokens should not be setup, + final Set checked = Sets.newHashSet(); + principalValidatorResolver.registerPlugin("testvalidator", new PrincipalValidatorPlugin() { + public boolean validate(Content proxyPrincipalToken) { + checked.add(proxyPrincipalToken); + return proxyPrincipalToken.hasProperty("protectedfield"); + } + + public String[] getProtectedFields() { + return new String[] { "protectedfield", "nullprotectedfield" }; + } + }); + final Content tokentContent = new Content("testtokenwithPlugin/" + tokenPrincipal, + ImmutableMap.of(PrincipalTokenValidator.VALIDATORPLUGIN, (Object) "testvalidator", + "protectedfield", "protected")); + accessControlManagerImpl.signContentToken(tokentContent, Security.ZONE_CONTENT, targetContentPath); + LOGGER.info("Checking Token {} ", tokentContent); + accessControlManagerImpl.setRequestPrincipalResolver(new PrincipalTokenResolver() { + public List resolveTokens(String principal) { + List tokens = Lists.newArrayList(); + tokens.add(tokentContent); + LOGGER.info("Principal {} checked tokens {}", principal, tokens); + return tokens; + } + }); + Assert.assertTrue(accessControlManagerImpl.can(user3Auth, Security.ZONE_CONTENT, + targetContentPath, Permissions.CAN_WRITE)); + Assert.assertFalse(accessControlManagerImpl.can(user3Auth, Security.ZONE_CONTENT, + targetContentPath, Permissions.CAN_READ)); + Assert.assertFalse(accessControlManagerImpl.can(user3Auth, 
Security.ZONE_CONTENT, + targetContentPath, Permissions.CAN_MANAGE)); + + Assert.assertEquals(1, checked.size()); + accessControlManagerImpl.clearRequestPrincipalResolver(); + LOGGER.info("Done Checking token"); + principalValidatorResolver.unregisterPlugin("testvalidator"); + + } + + @Test + public void testAccessInheritance() throws Exception { + // for KERN-2158 + // all our dependencies + Repository repository = (Repository) new BaseMemoryRepository().getRepository(); + Session adminSession = repository.loginAdministrative(); + AuthorizableManager adminAuthorizableManager = adminSession.getAuthorizableManager(); + ContentManager adminContentManager = adminSession.getContentManager(); + AccessControlManager adminAccessControlManager = adminSession.getAccessControlManager(); + + // create two users + Assert.assertTrue(adminAuthorizableManager.createUser("suzy", "suzy", "secret", + ImmutableMap.of("firstName", (Object) "Suzy", "lastName", "Queue"))); + Assert.assertTrue(adminAuthorizableManager.createUser("zach", "zach", "secret", + ImmutableMap.of("firstName", (Object) "Zach", "lastName", "Thomas"))); + + // Create the innermost group and make suzy a member. 
+ Group group; + Assert.assertTrue(adminAuthorizableManager.createGroup("inner", "inner", null)); + group = (Group) adminAuthorizableManager.findAuthorizable("inner"); + group.addMember("suzy"); + adminAuthorizableManager.updateAuthorizable(group); + adminAccessControlManager.setAcl(Security.ZONE_AUTHORIZABLES, "inner", + new AclModification[] { new AclModification("inner@g", Permissions.CAN_READ.getPermission(), + AclModification.Operation.OP_REPLACE) }); + adminAccessControlManager.setAcl(Security.ZONE_AUTHORIZABLES, "inner", + new AclModification[] { new AclModification("everyone@d", Permissions.ALL.getPermission(), + AclModification.Operation.OP_REPLACE) }); + adminAccessControlManager.setAcl(Security.ZONE_AUTHORIZABLES, "inner", + new AclModification[] { new AclModification("anonymous@d", Permissions.ALL.getPermission(), + AclModification.Operation.OP_REPLACE) }); + adminAuthorizableManager.updateAuthorizable(group); + + // Create a wrapper group. + Assert.assertTrue(adminAuthorizableManager.createGroup("wrapper", "wrapper", null)); + group = (Group) adminAuthorizableManager.findAuthorizable("wrapper"); + group.addMember("inner"); + adminAuthorizableManager.updateAuthorizable(group); + adminAccessControlManager.setAcl(Security.ZONE_AUTHORIZABLES, "wrapper", + new AclModification[] { new AclModification("wrapper@g", Permissions.CAN_READ.getPermission(), + AclModification.Operation.OP_REPLACE) }); + adminAccessControlManager.setAcl(Security.ZONE_AUTHORIZABLES, "wrapper", + new AclModification[] { new AclModification("everyone@d", Permissions.ALL.getPermission(), + AclModification.Operation.OP_REPLACE) }); + adminAccessControlManager.setAcl(Security.ZONE_AUTHORIZABLES, "wrapper", + new AclModification[] { new AclModification("anonymous@d", Permissions.ALL.getPermission(), + AclModification.Operation.OP_REPLACE) }); + adminAuthorizableManager.updateAuthorizable(group); + + // Create some content to test. 
+ adminContentManager.update(new Content("a:wrapper", null)); + adminAccessControlManager.setAcl(Security.ZONE_CONTENT, "a:wrapper", + new AclModification[] { new AclModification("wrapper@g", Permissions.CAN_READ.getPermission(), + AclModification.Operation.OP_REPLACE) }); + adminAccessControlManager.setAcl(Security.ZONE_CONTENT, "a:wrapper", + new AclModification[] { new AclModification("everyone@d", Permissions.ALL.getPermission(), + AclModification.Operation.OP_REPLACE) }); + adminAccessControlManager.setAcl(Security.ZONE_CONTENT, "a:wrapper", + new AclModification[] { new AclModification("anonymous@d", Permissions.ALL.getPermission(), + AclModification.Operation.OP_REPLACE) }); + + // Start a new session. + adminSession.logout(); + Session adminSession2 = repository.loginAdministrative(); + AuthorizableManager adminAuthorizableManager2 = adminSession2.getAuthorizableManager(); + AccessControlManager adminAccessControlManager2 = adminSession2.getAccessControlManager(); + + // make sure suzy can read + Authorizable suzy = adminAuthorizableManager2.findAuthorizable("suzy"); + Assert.assertTrue(adminAccessControlManager2.can(suzy, Security.ZONE_AUTHORIZABLES, "inner", + Permissions.CAN_READ)); + Assert.assertTrue(adminAccessControlManager2.can(suzy, Security.ZONE_AUTHORIZABLES, + "wrapper", Permissions.CAN_READ)); + Assert.assertFalse(adminAccessControlManager2.can(suzy, Security.ZONE_AUTHORIZABLES, + "wrapper", Permissions.CAN_WRITE)); + Assert.assertFalse(adminAccessControlManager2.can(suzy, Security.ZONE_CONTENT, "a:wrapper", + Permissions.CAN_WRITE)); + Assert.assertTrue(adminAccessControlManager2.can(suzy, Security.ZONE_CONTENT, "a:wrapper", + Permissions.CAN_READ)); + + // Make sure zach cannot + Authorizable zach = adminAuthorizableManager2.findAuthorizable("zach"); + Assert.assertFalse(adminAccessControlManager2.can(zach, Security.ZONE_AUTHORIZABLES, + "wrapper", Permissions.CAN_READ)); + Assert.assertFalse(adminAccessControlManager2.can(zach, 
Security.ZONE_CONTENT, "a:wrapper", + Permissions.CAN_READ)); + + final Session normalSession = repository.loginAdministrative("suzy"); + final AuthorizableManager normalAuthorizableManager = normalSession + .getAuthorizableManager(); + final AccessControlManager normalAccessControlManager = normalSession + .getAccessControlManager(); + Authorizable normalSuzy = normalAuthorizableManager.findAuthorizable("suzy"); + Assert.assertTrue(normalAccessControlManager.can(normalSuzy, Security.ZONE_AUTHORIZABLES, + "wrapper", Permissions.CAN_READ)); + } + + @Test + public void testMoveAcl() throws Exception { + Repository repository = (Repository) new BaseMemoryRepository().getRepository(); + Session adminSession = repository.loginAdministrative(); + AuthorizableManager adminAuthorizableManager = adminSession.getAuthorizableManager(); + ContentManager adminContentManager = adminSession.getContentManager(); + AccessControlManager adminAccessControlManager = adminSession.getAccessControlManager(); + + // create a test user and some test permissions + String u1 = "user1-" + System.currentTimeMillis(); + adminAuthorizableManager.createUser(u1, u1, u1, null); + Authorizable user1 = adminAuthorizableManager.findAuthorizable(u1); + + String from = "a-test-node"; + String to = "another-test-node"; + + // verify user1 can read but can't write to the node which should be the default + Assert.assertTrue(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, from, Permissions.CAN_READ)); + Assert.assertFalse(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, from, Permissions.CAN_WRITE)); + + // add write permission for user1 to the from node + AclModification user1canWrite = new AclModification(AclModification.grantKey(u1), + Permissions.CAN_WRITE.getPermission(), AclModification.Operation.OP_OR); + + adminContentManager.update(new Content("a-test-node", null)); + adminAccessControlManager.setAcl(Security.ZONE_CONTENT, "a-test-node", + new AclModification[] { 
user1canWrite }); + + // verify user1 can read and write to the node + Assert.assertTrue(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, from, Permissions.CAN_READ)); + Assert.assertTrue(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, from, Permissions.CAN_WRITE)); + + // move the node + adminContentManager.move(from, to); + + // verify that the permissions came over with the node + adminAccessControlManager.can(user1, Security.ZONE_CONTENT, to, Permissions.CAN_READ); + Assert.assertTrue(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, to, Permissions.CAN_READ)); + Assert.assertTrue(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, to, Permissions.CAN_WRITE)); + + // verify the write permission is gone from the old node + Assert.assertTrue(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, from, Permissions.CAN_READ)); + Assert.assertFalse(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, from, Permissions.CAN_WRITE)); + + // try to move a node to itself. this should throw an exception + try { + adminContentManager.move(to, to); + Assert.fail("Should throw an exception when moving without force to a location that already exists."); + } catch (StorageClientException e) { + // expected + } + + // this should work fine + adminAccessControlManager.setAcl(Security.ZONE_CONTENT, "a-test-node", + new AclModification[] { user1canWrite }); + adminContentManager.move(to, from, true); + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/authorizable/AbstractAuthorizableManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/authorizable/AbstractAuthorizableManagerImplTest.java new file mode 100644 index 00000000..275fe481 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/authorizable/AbstractAuthorizableManagerImplTest.java @@ -0,0 +1,698 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.authorizable; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.authorizable.Group; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.LoggingStorageListener; +import 
org.sakaiproject.nakamura.lite.RepositoryImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.PrincipalValidatorResolverImpl; +import org.sakaiproject.nakamura.lite.storage.spi.ConcurrentLRUMap; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +public abstract class AbstractAuthorizableManagerImplTest { + + private static final Logger LOGGER = LoggerFactory + .getLogger(AbstractAuthorizableManagerImplTest.class); + private StorageClient client; + private ConfigurationImpl configuration; + private StorageClientPool clientPool; + private Map sharedCache = new ConcurrentLRUMap(1000); + private PrincipalValidatorResolver principalValidatorResolver = new PrincipalValidatorResolverImpl(); + + @Before + public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, + ClassNotFoundException, IOException { + configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + configuration.activate(properties); + clientPool = getClientPool(configuration); + client = clientPool.getClient(); + AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, + configuration); + authorizableActivator.setup(); + LOGGER.info("Setup Complete"); + } + + protected abstract StorageClientPool getClientPool(Configuration configuration2) throws ClassNotFoundException; + + @After + public void after() throws ClientPoolException { + if 
(client != null) { + client.close(); + } + } + + @Test + public void testAuthorizableManager() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + Assert.assertNotNull(currentUser); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + Assert.assertNotNull(authorizableManager.findAuthorizable(User.ADMIN_USER)); + Assert.assertNotNull(authorizableManager.findAuthorizable(User.ANON_USER)); + Assert.assertEquals(currentUser, authorizableManager.getUser()); + } + + @Test + public void testAuthorizableManagerAccessDenied() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "wrong-password"); + + Assert.assertNull(currentUser); + } + + @Test + public void testAuthorizableManagerUserNotFound() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("nonuser", "wrong-password"); + + Assert.assertNull(currentUser); + } + + @Test + public void testAuthorizableManagerCheckUser() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new 
AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + Authorizable a = authorizableManager.findAuthorizable(User.ADMIN_USER); + Authorizable an = authorizableManager.findAuthorizable(User.ANON_USER); + Authorizable missing = authorizableManager.findAuthorizable("missinguser"); + Assert.assertNull(missing); + Assert.assertNotNull(a); + Assert.assertNotNull(an); + Assert.assertFalse(a instanceof Group); + Assert.assertFalse(an instanceof Group); + User user = (User) a; + String[] principals = user.getPrincipals(); + Assert.assertNotNull(principals); + Assert.assertEquals(1, principals.length); + Assert.assertTrue(user.isAdmin()); + + User anon = (User) an; + principals = anon.getPrincipals(); + Assert.assertNotNull(principals); + Assert.assertEquals(0, principals.length); + Assert.assertFalse(anon.isAdmin()); + + } + + @Test + public void testAuthorizableManagerCreateUser() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + authorizableManager.delete("testuser"); + + Assert.assertTrue(authorizableManager.createUser("testuser", "Test User", "test", + ImmutableMap.of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + 
"administrators;testers", Authorizable.AUTHORIZABLE_TYPE_FIELD, + Authorizable.GROUP_VALUE))); + Assert.assertFalse(authorizableManager.createUser("testuser", "Test User", "test", + ImmutableMap.of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "administrators;testers"))); + + Authorizable a = authorizableManager.findAuthorizable("testuser"); + Assert.assertNotNull(a); + Assert.assertFalse(a instanceof Group); + User user = (User) a; + String[] principals = user.getPrincipals(); + Assert.assertNotNull(principals); + LOGGER.info("Principals {} ", Arrays.toString(principals)); + Assert.assertEquals(3, principals.length); + Assert.assertTrue(user.isAdmin()); + + } + + @Test + public void testAuthorizableManagerCreateUserDenied() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + authorizableManager.delete("testuser2"); + + Assert.assertTrue(authorizableManager.createUser("testuser2", "Test User", "test", + ImmutableMap.of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "testers", Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.GROUP_VALUE))); + Assert.assertFalse(authorizableManager.createUser("testuser2", "Test User", "test", + ImmutableMap.of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "administrators;testers"))); + + Authorizable a = authorizableManager.findAuthorizable("testuser2"); + Assert.assertNotNull(a); + Assert.assertFalse(a instanceof Group); 
+ User user = (User) a; + String[] principals = user.getPrincipals(); + LOGGER.info("Principals {} ", Arrays.toString(principals)); + Assert.assertArrayEquals(new String[] { "testers", Group.EVERYONE }, principals); + + Assert.assertFalse(user.isAdmin()); + + AccessControlManagerImpl userAccessControlManagerImpl = new AccessControlManagerImpl( + client, user, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + AuthorizableManagerImpl userAuthorizableManager = new AuthorizableManagerImpl(user, null, + client, configuration, userAccessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + try { + userAuthorizableManager.createUser("testuser3", "Test User", "test", ImmutableMap.of( + "testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "administrators;testers", Authorizable.AUTHORIZABLE_TYPE_FIELD, + Authorizable.GROUP_VALUE)); + Assert.fail(); + } catch (AccessDeniedException e) { + LOGGER.info(" Correctly denied access {} ", e.getMessage()); + } + + try { + userAuthorizableManager.createUser("testuser4", "Test User", "test", ImmutableMap.of( + "testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "administrators;testers")); + Assert.fail(); + } catch (AccessDeniedException e) { + LOGGER.info(" Correctly denied access {} ", e.getMessage()); + } + + } + + @Test + public void testAuthorizableManagerCreateGroup() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new 
LoggingStorageListener());
+
+        authorizableManager.delete("user2");
+        authorizableManager.delete("user3");
+        authorizableManager.delete("testgroup");
+
+        Assert.assertTrue(authorizableManager.createUser("user2", "TestUser2", null, ImmutableMap
+                .of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD,
+                        "administrators;testers")));
+        Assert.assertTrue(authorizableManager.createUser("user3", "TestUser", null, ImmutableMap
+                .of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD,
+                        "administrators;testers")));
+        Assert.assertTrue(authorizableManager.createGroup("testgroup", "Test Group", ImmutableMap
+                .of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD,
+                        "administrators;testers", Authorizable.MEMBERS_FIELD, "user1;user2")));
+        Assert.assertFalse(authorizableManager.createGroup("testgroup", "Test Group", ImmutableMap
+                .of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD,
+                        "administrators;testers", Authorizable.MEMBERS_FIELD, "user1;user2",
+                        Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.GROUP_VALUE)));
+
+        Authorizable a = authorizableManager.findAuthorizable("testgroup");
+        Assert.assertNotNull(a);
+        Assert.assertTrue(a instanceof Group);
+        Group g = (Group) a;
+        String[] principals = g.getPrincipals();
+        LOGGER.info("Principals {} ", Arrays.toString(principals));
+        Assert.assertArrayEquals(new String[] { "administrators", "testers", Group.EVERYONE },
+                principals);
+        String[] members = g.getMembers();
+        LOGGER.info("Members {} ", Arrays.toString(members));
+        Assert.assertArrayEquals(new String[] { "user1", "user2" }, members);
+
+        g.setProperty("SomeValue", "AValue");
+        g.setProperty(Authorizable.PASSWORD_FIELD, "badpassword");
+        g.removeProperty("testkey");
+        g.addPrincipal("tester2");
+        g.removePrincipal("testers");
+        // adding user 3 should make it a member of testgroup and give it the
+        // principal testgroup
+        g.addMember("user3");
+        g.removeMember("user2");
+
+        principals = g.getPrincipals();
+        List
principalList = Lists.newArrayList(principals);
+        Collections.sort(principalList);
+        principals = principalList.toArray(new String[principalList.size()]);
+        LOGGER.info("Principals before save {} ", Arrays.toString(principals));
+        Assert.assertArrayEquals(new String[] { "administrators", Group.EVERYONE, "tester2" },
+                principals);
+        members = g.getMembers();
+        LOGGER.info("Members {} ", Arrays.toString(members));
+        Assert.assertArrayEquals(new String[] { "user1", "user3" }, members);
+
+        LOGGER.info("Updating Group with changed membership ----------------------");
+        authorizableManager.updateAuthorizable(g);
+        LOGGER.info("Done Updating Group with changed membership ----------------------");
+
+        Authorizable a2 = authorizableManager.findAuthorizable("testgroup");
+        Assert.assertNotNull(a2);
+        Assert.assertTrue(a2 instanceof Group);
+        Group g2 = (Group) a2;
+        principals = g2.getPrincipals();
+        LOGGER.info("Principals {} ", Arrays.toString(principals));
+        principalList = Lists.newArrayList(principals);
+        Collections.sort(principalList);
+        principals = principalList.toArray(new String[principalList.size()]);
+        Assert.assertArrayEquals(new String[] { "administrators", Group.EVERYONE, "tester2" },
+                principals);
+        members = g2.getMembers();
+        LOGGER.info("Members {} ", Arrays.toString(members));
+        Assert.assertArrayEquals(new String[] { "user1", "user3" }, members);
+        Assert.assertNull(g2.getProperty(Authorizable.PASSWORD_FIELD));
+
+        // Test that User3 now has testgroup as a principal.
+ Authorizable a3 = authorizableManager.findAuthorizable("user3"); + Assert.assertNotNull(a3); + Assert.assertFalse(a3 instanceof Group); + User u3 = (User) a3; + principals = u3.getPrincipals(); + LOGGER.info("Principals {} ", Arrays.toString(principals)); + Assert.assertArrayEquals(new String[] { "administrators", "testers", "testgroup", + Group.EVERYONE }, principals); + + } + + @Test + public void testFindAuthorizable() throws StorageClientException, AccessDeniedException { + try { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl( + client, currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + for (int i = 0; i < 10; i++) { + authorizableManager.delete("testfinduser" + i); + Assert.assertTrue(authorizableManager.createUser("testfinduser" + i, "TestUser", + null, ImmutableMap.of("rep:principalName", (Object) ("principal" + i), + "sakai:groupproperty", "groupprop", "sakai:userprop", "userprop"))); + authorizableManager.delete("testgroup" + i); + Assert.assertTrue(authorizableManager.createGroup("testgroup" + i, + "Test Group" + i, ImmutableMap.of("rep:principalName", + (Object) ("principal" + i), "sakai:groupproperty", "groupprop", + "sakai:grprop", "grprop"))); + } + for (int i = 0; i < 10; i++) { + Iterator userIterator = authorizableManager.findAuthorizable( + "rep:principalName", "principal" + i, User.class); + Assert.assertNotNull(userIterator); + Assert.assertTrue(userIterator.hasNext()); + Authorizable a = userIterator.next(); + Assert.assertFalse(userIterator.hasNext()); + Assert.assertTrue(a instanceof User); + User u 
= (User) a; + Assert.assertEquals("testfinduser" + i, u.getId()); + } + for (int i = 0; i < 10; i++) { + Iterator groupIterator = authorizableManager.findAuthorizable( + "rep:principalName", "principal" + i, Group.class); + Assert.assertNotNull(groupIterator); + Assert.assertTrue(groupIterator.hasNext()); + Authorizable a = groupIterator.next(); + Assert.assertFalse(groupIterator.hasNext()); + Assert.assertTrue(a instanceof Group); + Group u = (Group) a; + Assert.assertEquals("testgroup" + i, u.getId()); + } + for (int i = 0; i < 10; i++) { + Iterator groupIterator = authorizableManager.findAuthorizable( + "rep:principalName", "principal" + i, Authorizable.class); + Assert.assertNotNull(groupIterator); + Assert.assertTrue(groupIterator.hasNext()); + Authorizable a = groupIterator.next(); + if (a instanceof Group) { + Assert.assertEquals("testgroup" + i, a.getId()); + } else { + Assert.assertEquals("testfinduser" + i, a.getId()); + } + Assert.assertTrue(groupIterator.hasNext()); + a = groupIterator.next(); + if (a instanceof Group) { + Assert.assertEquals("testgroup" + i, a.getId()); + } else { + Assert.assertEquals("testfinduser" + i, a.getId()); + } + Assert.assertFalse(groupIterator.hasNext()); + } + } catch (UnsupportedOperationException e) { + LOGGER.warn("Finder methods not implemented, FIXME"); + } + + } + + @Test + public void testAuthorizableManagerNullProperties() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new 
LoggingStorageListener()); + + authorizableManager.delete("testuser"); + + Assert.assertTrue(authorizableManager.createUser("testuser", "Test User", "test", null)); + Authorizable user = authorizableManager.findAuthorizable("testuser"); + Assert.assertNotNull(user); + Assert.assertTrue(user instanceof User); + + authorizableManager.delete("testgroup"); + Assert.assertTrue(authorizableManager.createGroup("testgroup", "Test Group", null)); + Authorizable group = authorizableManager.findAuthorizable("testgroup"); + Assert.assertNotNull(group); + Assert.assertTrue(group instanceof Group); + } + + @Test + public void testAuthorizableManagerTrigger() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + authorizableManager.delete("testuser"); + + Assert.assertTrue(authorizableManager.createUser("testuser", "Test User", "test", + ImmutableMap.of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "administrators;testers", Authorizable.AUTHORIZABLE_TYPE_FIELD, + Authorizable.GROUP_VALUE))); + Assert.assertFalse(authorizableManager.createUser("testuser", "Test User", "test", + ImmutableMap.of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "administrators;testers"))); + + authorizableManager.triggerRefresh("testuser"); + + } + + @Test + public void testAuthorizableManagerTriggerAll() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new 
AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + authorizableManager.delete("testuser"); + + Assert.assertTrue(authorizableManager.createUser("testuser", "Test User", "test", + ImmutableMap.of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "administrators;testers", Authorizable.AUTHORIZABLE_TYPE_FIELD, + Authorizable.GROUP_VALUE))); + Assert.assertFalse(authorizableManager.createUser("testuser", "Test User", "test", + ImmutableMap.of("testkey", (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, + "administrators;testers"))); + + authorizableManager.triggerRefreshAll(); + + } + + @Test + public void testAuthorizablePermissions() throws ClientPoolException, StorageClientException, AccessDeniedException { + RepositoryImpl repository = new RepositoryImpl(configuration, clientPool, new LoggingStorageListener()); + Map properties = ImmutableMap.of("t", (Object) "x"); + repository.activate(properties); + + // create some users + + Session adminSession = repository.loginAdministrative(); + adminSession.getAuthorizableManager().createUser( + "testAuthorizablePermissions.user1", + "User1-testAuthorizablePermissions", + "password", + ImmutableMap.of("publicproperty", (Object) "publicvalue", "privateproperty", + "privatevalue", "protectedproperty", "protectedvalue")); + + adminSession.getAuthorizableManager().createUser( + "testAuthorizablePermissions.user2", + "User2-testAuthorizablePermissions", + "password2", + ImmutableMap.of("publicproperty", (Object) "publicvalue", "privateproperty", + 
"privatevalue", "protectedproperty", "protectedvalue")); + User user1 = (User) adminSession.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user1"); + User user2 = (User) adminSession.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user2"); + user1.setProperty("publicproperty", "publicvalue"); + user1.setProperty("privateproperty", "privatevalue"); + user1.setProperty("protectedproperty", "protectedvalue"); + adminSession.getAuthorizableManager().updateAuthorizable(user1); + user2.setProperty("publicproperty", "publicvalue"); + user2.setProperty("privateproperty", "privatevalue"); + user2.setProperty("protectedproperty", "protectedvalue"); + adminSession.getAuthorizableManager().updateAuthorizable(user2); + user1 = (User) adminSession.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user1"); + user2 = (User) adminSession.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user2"); + + List modifications = Lists.newArrayList(); + // grant user 1 read and write + AclModification.addAcl(true, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(user1.getId(), "privateproperty"), modifications); + AclModification.addAcl(false, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(User.ANON_USER, "privateproperty"), modifications); + AclModification.addAcl(false, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(Group.EVERYONE, "privateproperty"), modifications); + + // only grant user 1 read on the protected property + AclModification.addAcl(true, Permissions.CAN_READ_PROPERTY, + AclModification.getPropertyKey(user1.getId(), "protectedproperty"), modifications); + AclModification.addAcl(false, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(User.ANON_USER, "protectedproperty"), modifications); + AclModification.addAcl(false, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(Group.EVERYONE, 
"protectedproperty"), modifications); + + adminSession.getAccessControlManager().setAcl(Security.ZONE_AUTHORIZABLES, user1.getId(), + modifications.toArray(new AclModification[modifications.size()])); + + modifications.clear(); + // grant user 2 read and write + AclModification.addAcl(true, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(user2.getId(), "privateproperty"), modifications); + AclModification.addAcl(false, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(User.ANON_USER, "privateproperty"), modifications); + AclModification.addAcl(false, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(Group.EVERYONE, "privateproperty"), modifications); + + // only grant user 2 read on the protected property + AclModification.addAcl(true, Permissions.CAN_READ_PROPERTY, + AclModification.getPropertyKey(user2.getId(), "protectedproperty"), modifications); + AclModification.addAcl(false, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(User.ANON_USER, "protectedproperty"), modifications); + AclModification.addAcl(false, Permissions.CAN_ANYTHING_PROPERTY, + AclModification.getPropertyKey(Group.EVERYONE, "protectedproperty"), modifications); + + adminSession.getAccessControlManager().setAcl(Security.ZONE_AUTHORIZABLES, user2.getId(), + modifications.toArray(new AclModification[modifications.size()])); + + + adminSession.logout(); + + Session user1Session = repository.login(user1.getId(), "password"); + // locate user1 and user2 + User user1user1 = (User) user1Session.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user1"); + User user1user2 = (User) user1Session.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user2"); + // on user1, user1 should be able to read all properties + Assert.assertEquals("publicvalue", user1user1.getProperty("publicproperty")); + Assert.assertEquals("privatevalue", user1user1.getProperty("privateproperty")); + 
Assert.assertEquals("protectedvalue", user1user1.getProperty("protectedproperty")); + // on user2, user1 should only be able to read the public property + Assert.assertEquals("publicvalue", user1user2.getProperty("publicproperty")); + Assert.assertNull(user1user2.getProperty("privateproperty")); + Assert.assertNull(user1user2.getProperty("protectedproperty")); + // on user1, user1 should be able to set public value and private value + user1user1.setProperty("publicproperty","newpubvalue1"); + user1user1.setProperty("privateproperty","newprivvalue1"); + // but not the protected property. + user1user1.setProperty("protectedproperty","newprotectedvalue1"); + Assert.assertEquals("newpubvalue1", user1user1.getProperty("publicproperty")); + Assert.assertEquals("newprivvalue1", user1user1.getProperty("privateproperty")); + Assert.assertEquals("protectedvalue", user1user1.getProperty("protectedproperty")); + // save and reload to check + user1Session.getAuthorizableManager().updateAuthorizable(user1user1); + User user1user1Check = (User) user1Session.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user1"); + Assert.assertEquals("newpubvalue1", user1user1Check.getProperty("publicproperty")); + Assert.assertEquals("newprivvalue1", user1user1Check.getProperty("privateproperty")); + Assert.assertEquals("protectedvalue", user1user1Check.getProperty("protectedproperty")); + // now check user1 access to user2 + user1user2.setProperty("publicproperty","newpubvalue1"); + user1user2.setProperty("privateproperty","newprivvalue1"); + // but not the protected property. 
+ user1user2.setProperty("protectedproperty","newprotectedvalue1"); + Assert.assertEquals("newpubvalue1", user1user2.getProperty("publicproperty")); + Assert.assertNull(user1user2.getProperty("privateproperty")); + Assert.assertNull(user1user2.getProperty("protectedproperty")); + // save and reload to check + try { + user1Session.getAuthorizableManager().updateAuthorizable(user1user2); + Assert.fail("User1 cant update user2, even if they can modify the local copy of user2"); + } catch ( AccessDeniedException e ) { + //ok + } + User user1user2Check = (User) user1Session.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user2"); + Assert.assertEquals("publicvalue", user1user2Check.getProperty("publicproperty")); + Assert.assertNull(user1user2Check.getProperty("privateproperty")); + Assert.assertNull(user1user2Check.getProperty("protectedproperty")); + + user1Session.logout(); + // all ok for user1, + + + Session user2Session = repository.login(user2.getId(), "password2"); + // locate user1 and user2 + User user2user1 = (User) user2Session.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user1"); + User user2user2 = (User) user2Session.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user2"); + // on user2, user2 should be able to read all properties + Assert.assertEquals("publicvalue", user2user2.getProperty("publicproperty")); + Assert.assertEquals("privatevalue", user2user2.getProperty("privateproperty")); + Assert.assertEquals("protectedvalue", user2user2.getProperty("protectedproperty")); + // on user1, user2 should only be able to read the public property + Assert.assertEquals("newpubvalue1", user2user1.getProperty("publicproperty")); + Assert.assertNull(user2user1.getProperty("privateproperty")); + Assert.assertNull(user2user1.getProperty("protectedproperty")); + // on user2, user2 should be able to set public value and private value + user2user2.setProperty("publicproperty","newpubvalue1"); + 
user2user2.setProperty("privateproperty","newprivvalue1"); + // but not the protected property. + user2user2.setProperty("protectedproperty","newprotectedvalue1"); + Assert.assertEquals("newpubvalue1", user2user2.getProperty("publicproperty")); + Assert.assertEquals("newprivvalue1", user2user2.getProperty("privateproperty")); + Assert.assertEquals("protectedvalue", user2user2.getProperty("protectedproperty")); + // save and reload to check + user2Session.getAuthorizableManager().updateAuthorizable(user2user2); + User user2user2Check = (User) user2Session.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user2"); + Assert.assertEquals("newpubvalue1", user2user2Check.getProperty("publicproperty")); + Assert.assertEquals("newprivvalue1", user2user2Check.getProperty("privateproperty")); + Assert.assertEquals("protectedvalue", user2user2Check.getProperty("protectedproperty")); + // now check user2 access to user1 + user2user1.setProperty("publicproperty","newpubvalue1"); + user2user1.setProperty("privateproperty","newprivvalue1"); + // but not the protected property. 
+ user2user1.setProperty("protectedproperty","newprotectedvalue1"); + Assert.assertEquals("newpubvalue1", user2user1.getProperty("publicproperty")); + Assert.assertNull(user2user1.getProperty("privateproperty")); + Assert.assertNull(user2user1.getProperty("protectedproperty")); + // save and reload to check + try { + user2Session.getAuthorizableManager().updateAuthorizable(user2user1); + Assert.fail("User2 cant update user1, even if they can modify the local copy of user2"); + } catch ( AccessDeniedException e ) { + //ok + } + User user2user1Check = (User) user2Session.getAuthorizableManager().findAuthorizable("testAuthorizablePermissions.user1"); + Assert.assertEquals("newpubvalue1", user2user1Check.getProperty("publicproperty")); + Assert.assertNull(user2user1Check.getProperty("privateproperty")); + Assert.assertNull(user2user1Check.getProperty("protectedproperty")); + + + + } +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerFinderTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerFinderTest.java new file mode 100644 index 00000000..1be70bd3 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerFinderTest.java @@ -0,0 +1,1249 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.content; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.LoggingStorageListener; +import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.PrincipalValidatorResolverImpl; +import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +public abstract class 
AbstractContentManagerFinderTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(AbstractContentManagerFinderTest.class); + private StorageClient client; + private ConfigurationImpl configuration; + private StorageClientPool clientPool; + private PrincipalValidatorResolver principalValidatorResolver = new PrincipalValidatorResolverImpl(); + + @Before + public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, + ClassNotFoundException, IOException { + configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); + clientPool = getClientPool(configuration); + client = clientPool.getClient(); + AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, + configuration); + authorizableActivator.setup(); + LOGGER.info("Setup Complete"); + } + + protected abstract StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException; + + @After + public void after() throws ClientPoolException { + client.close(); + } + + + @Test + public void testSimpleFind() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + contentManager.update(new Content("/simpleFind", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue1"))); + contentManager.update(new 
Content("/simpleFind/item2", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue1"))); + contentManager.update(new Content("/simpleFind/test", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue3"))); + contentManager.update(new Content("/simpleFind/test/ing", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue4"))); + + verifyResults(contentManager.find(ImmutableMap.of("sakai:marker", (Object) "testSimpleFindvalue4")), + ImmutableSet.of("/simpleFind/test/ing")); + verifyResults(contentManager.find(ImmutableMap.of("sakai:marker", (Object) "testSimpleFindvalue1")), + ImmutableSet.of("/simpleFind", "/simpleFind/item2")); + + } + + @Test + public void testSimpleFindWithSort() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + contentManager.update(new Content("/simpleFind", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue1"))); + contentManager.update(new Content("/simpleFind/item2", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue1"))); + contentManager.update(new Content("/simpleFind/test", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue3"))); + contentManager.update(new Content("/simpleFind/test/ing", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue4"))); + + verifyResults(contentManager.find(ImmutableMap.of("sakai:marker", (Object) "testSimpleFindvalue4", "_sort", "sakai:marker")), + ImmutableSet.of("/simpleFind/test/ing")); + 
verifyResults(contentManager.find(ImmutableMap.of("sakai:marker", (Object) "testSimpleFindvalue1", "_sort", "sakai:marker")), + ImmutableSet.of("/simpleFind", "/simpleFind/item2")); + + } + + @Test + public void testSimpleArrayFind() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + contentManager.update(new Content("/simpleArrayFind", ImmutableMap.of("sakai:category", + (Object) new String[] { "testSimpleArrayFindvalue88", "testSimpleArrayFindvalue1" }))); + contentManager.update(new Content("/simpleArrayFind/item2", ImmutableMap.of("sakai:category", + (Object) new String[] { "testSimpleArrayFindvalue88", "testSimpleArrayFindvalue1" }))); + contentManager.update(new Content("/simpleArrayFind/test", ImmutableMap.of("sakai:category", + (Object) new String[] { "testSimpleArrayFindvalue44", "testSimpleArrayFindvalue3" }))); + contentManager.update(new Content("/simpleArrayFind/test/ing", ImmutableMap.of( + "sakai:category", (Object) new String[] { "testSimpleArrayFindvalue88", "testSimpleArrayFindvalue4" }))); + + verifyResults(contentManager.find(ImmutableMap.of("sakai:category", (Object) "testSimpleArrayFindvalue4")), + ImmutableSet.of("/simpleArrayFind/test/ing")); + verifyResults(contentManager.find(ImmutableMap.of("sakai:category", (Object) "testSimpleArrayFindvalue1")), + ImmutableSet.of("/simpleArrayFind", "/simpleArrayFind/item2")); + verifyResults(contentManager.find(ImmutableMap.of("sakai:category", (Object) "testSimpleArrayFindvalue88")), + 
ImmutableSet.of("/simpleArrayFind/test/ing", "/simpleArrayFind", + "/simpleArrayFind/item2")); + + } + + @Test + public void testFindNoFilter() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + contentManager.update(new Content("/testFindNoFilter", ImmutableMap.of("sakai:marker", + (Object) new String[] { "testFindNoFiltervalue88", "testFindNoFiltervalue1" }))); + contentManager.update(new Content("/testFindNoFilter/item2", ImmutableMap.of("sakai:marker", + (Object) new String[] { "testFindNoFiltervalue88", "testFindNoFiltervalue1" }))); + contentManager.update(new Content("/testFindNoFilter/test", ImmutableMap.of("sakai:marker", + (Object) new String[] { "testFindNoFiltervalue44", "testFindNoFiltervalue3" }))); + contentManager.update(new Content("/testFindNoFilter/test/ing", ImmutableMap.of( + "sakai:marker", (Object) new String[] { "testFindNoFiltervalue88", "testSimpleArrayFindvalue4" }))); + + Iterable found = contentManager.find(ImmutableMap.of("non-indexed-property", (Object) "testFindNoFiltervalue4")); + Iterator foundIterator = found.iterator(); + Assert.assertFalse(foundIterator.hasNext()); + } + + protected void verifyResults(Iterable ic, Set shouldFind) { + int i = 0; + for (Content c : ic) { + String path = c.getPath(); + if (shouldFind.contains(c.getPath())) { + i++; + } else { + LOGGER.info("Found wrong content {}", path); + } + } + Assert.assertEquals(shouldFind.size(), i); + } + + /** + * search for "a" find contentA + * + * @throws StorageClientException 
+ * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindA() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) MV.multiValueA[0]); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" find contentA + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindA2() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) MV.multiValueA[1]); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, 
match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "x" find contentX only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindX() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) MV.multiValueB[0]); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathB, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueB, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "x" find contentX only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindX2() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) MV.multiValueB[1]); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + 
assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathB, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueB, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "x" find contentX only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindX3() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) MV.multiValueB[2]); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathB, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueB, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" or "b" find contentA only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAorB() throws StorageClientException, + 
AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(MV.multiValueA)); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" or "b" find contentA only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindBorA() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new String[] { MV.multiValueA[1], MV.multiValueA[0] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); 
+ assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "x" or "y" find contentX only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindXorY() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new String[] { MV.multiValueB[0], MV.multiValueB[1] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathB, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueB, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "x" or "y" find contentX only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindXorZ() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new 
String[] { MV.multiValueB[0], MV.multiValueB[2] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathB, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueB, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" and "b" find contentA only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAandB() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(MV.multiValueA)); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.multiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should 
have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" and "x" find nothing + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAandX() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new String[] { MV.multiValueA[0], MV.multiValueB[0] })); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertFalse("Should NOT have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + found++; + } + assertTrue("Should NOT have found any match; found: " + found, found == 0); + } + + /** + * search for "a" and "x" find nothing + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAandX2() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new String[] { MV.multiValueA[1], MV.multiValueB[1] })); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertFalse("Should NOT have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + found++; + } + assertTrue("Should NOT have found any match; found: " + found, found == 0); + } + + /** + * search for "a" and "x" find nothing + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void 
testMultiValuedIndexSearchFindAandX3() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new String[] { MV.multiValueA[1], MV.multiValueB[2] })); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertFalse("Should NOT have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + found++; + } + assertTrue("Should NOT have found any match; found: " + found, found == 0); + } + + /** + * search for "a" or "x" find contentA and contentB + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAorX() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new String[] { MV.multiValueA[0], MV.multiValueB[0] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertTrue("Path should match one of the two Contents", + MV.pathA.equals(match.getPath()) || MV.pathB.equals(match.getPath())); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("Multi-valued property should equal one of the two Contents", + 
Arrays.equals(MV.multiValueA, (String[]) match.getProperty(MV.propKey)) + || Arrays.equals(MV.multiValueB, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found two matches; found: " + found, found == 2); + } + + /** + * search for "a" or "x" find contentA and contentB + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAorX2() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new String[] { MV.multiValueA[1], MV.multiValueB[1] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertTrue("Path should match one of the two Contents", + MV.pathA.equals(match.getPath()) || MV.pathB.equals(match.getPath())); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("Multi-valued property should equal one of the two Contents", + Arrays.equals(MV.multiValueA, (String[]) match.getProperty(MV.propKey)) + || Arrays.equals(MV.multiValueB, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found two matches; found: " + found, found == 2); + } + + /** + * search for "a" or "x" find contentA and contentB + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAorX3() throws StorageClientException, + AccessDeniedException { + final 
ContentManager contentManager = setupMultiValuedIndexSearch(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays.asList(new String[] { MV.multiValueA[1], MV.multiValueB[2] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertTrue("Path should match one of the two Contents", + MV.pathA.equals(match.getPath()) || MV.pathB.equals(match.getPath())); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("Multi-valued property should equal one of the two Contents", + Arrays.equals(MV.multiValueA, (String[]) match.getProperty(MV.propKey)) + || Arrays.equals(MV.multiValueB, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found two matches; found: " + found, found == 2); + } + + /** + * search for "a" find contentA + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltA() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) MV.altMultiValueA[0]); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + 
assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" find contentA + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltA2() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) MV.altMultiValueA[1]); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" find contentA + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltA3() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) MV.altMultiValueA[2]); + final Iterable iterable 
= contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" or "b" find contentA only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltAorB() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[0], MV.altMultiValueA[1] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only 
one match; found: " + found, found == 1); + } + + /** + * search for "a" or "b" find contentA only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltAorB2() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[1], MV.altMultiValueA[2] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" and "b" find contentA only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltAandB() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[0], MV.altMultiValueA[1] })); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = 
iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "a" and "x" find nothing + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltAandX() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[0], MV.altMultiValueB[0] })); + final Iterable iterable = contentManager.find(searchCriteria); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertFalse("Should NOT have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + found++; + } + assertTrue("Should NOT have found any matches; found: " + found, found == 0); + } + + /** + * search for "a" or "x" find contentA and contentB + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltAorX() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[0], MV.altMultiValueB[0] 
})); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertTrue("Path should match one of the two Contents", + MV.pathA.equals(match.getPath()) || MV.pathB.equals(match.getPath())); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue( + "Multi-valued property should equal one of the two Contents", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey)) + || Arrays.equals(MV.altMultiValueB, + (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found two matches; found: " + found, found == 2); + } + + /** + * search for "a" or "x" find contentA and contentB + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltAorX2() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[1], MV.altMultiValueB[1] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + 
assertTrue("Path should match one of the two Contents", + MV.pathA.equals(match.getPath()) || MV.pathB.equals(match.getPath())); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue( + "Multi-valued property should equal one of the two Contents", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey)) + || Arrays.equals(MV.altMultiValueB, + (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found two matches; found: " + found, found == 2); + } + + /** + * search for "a" or "x" find contentA and contentB + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltAorX3() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[2], MV.altMultiValueB[1] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertTrue("Path should match one of the two Contents", + MV.pathA.equals(match.getPath()) || MV.pathB.equals(match.getPath())); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue( + "Multi-valued property should equal one of the two Contents", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey)) + || Arrays.equals(MV.altMultiValueB, + (String[]) match.getProperty(MV.propKey))); + found++; + } + 
assertTrue("Should have found two matches; found: " + found, found == 2); + } + + /** + * search for "x" or "y" find contentX only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltXorY() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[0], MV.altMultiValueA[1] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * search for "x" or "y" find contentX only once + * + * @throws StorageClientException + * @throws AccessDeniedException + */ + @Test + public void testMultiValuedIndexSearchFindAltXorZ() throws StorageClientException, + AccessDeniedException { + final ContentManager contentManager = setupAlternateMultiValuedProperties(); + final Map searchCriteria = ImmutableMap.of(MV.propKey, + (Object) Arrays + .asList(new String[] { MV.altMultiValueA[0], MV.altMultiValueA[2] })); + final Map orSet = ImmutableMap.of("orset0", (Object) searchCriteria); + final Iterable iterable = contentManager.find(orSet); + 
assertNotNull("Iterable should not be null", iterable); + final Iterator iter = iterable.iterator(); + assertNotNull("Iterator should not be null", iter); + assertTrue("Should have found a match", iter.hasNext()); + int found = 0; + while (iter.hasNext()) { + final Content match = iter.next(); + assertNotNull("match should not be null", match); + assertEquals(MV.pathA, match.getPath()); + assertNotNull("match should have key: " + MV.propKey, match.getProperty(MV.propKey)); + assertTrue("String[] should be equal", + Arrays.equals(MV.altMultiValueA, (String[]) match.getProperty(MV.propKey))); + found++; + } + assertTrue("Should have found only one match; found: " + found, found == 1); + } + + /** + * Create two contents with default values + * + * @return + * @throws StorageClientException + * @throws AccessDeniedException + */ + private ContentManager setupMultiValuedIndexSearch() throws StorageClientException, + AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + ContentManager contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + // add some content with multi-valued properties + Content contentA = contentManager.get(MV.pathA); + if (contentA == null) { + contentManager.update(new Content(MV.pathA, ImmutableMap.of(MV.propKey, + (Object) MV.multiValueA))); + } else { + contentA.setProperty(MV.propKey, (Object) MV.multiValueA); + contentManager.update(contentA); + } + Content contentX = contentManager.get(MV.pathB); + if (contentX == null) { + contentManager.update(new Content(MV.pathB, ImmutableMap.of(MV.propKey, + (Object) MV.multiValueB))); + } else { + 
contentX.setProperty(MV.propKey, (Object) MV.multiValueB); + contentManager.update(contentX); + } + + // get the content. + contentA = contentManager.get(MV.pathA); + contentX = contentManager.get(MV.pathB); + + // force a second update to ensure that the fields have been written more than once, + // if there is a problem this will cause the tests to fail when they are run in a batch or + // individually. + contentX.setProperty(MV.propKey, (Object) MV.multiValueB); + contentManager.update(contentX); + contentA.setProperty(MV.propKey, (Object) MV.multiValueA); + contentManager.update(contentA); + + // verify state of content + contentA = contentManager.get(MV.pathA); + contentX = contentManager.get(MV.pathB); + + + assertEquals(MV.pathA, contentA.getPath()); + assertEquals(MV.pathB, contentX.getPath()); + Map propsA = contentA.getProperties(); + Map propsX = contentX.getProperties(); + assertTrue(Arrays.equals(MV.multiValueA, (String[]) propsA.get(MV.propKey))); + assertTrue(Arrays.equals(MV.multiValueB, (String[]) propsX.get(MV.propKey))); + + + + return contentManager; + } + + /** + * Change the values of the properties to something else + * + * @return + * @throws StorageClientException + * @throws AccessDeniedException + */ + private ContentManager setupAlternateMultiValuedProperties() + throws StorageClientException, AccessDeniedException { + ContentManager contentManager = setupMultiValuedIndexSearch(); + // set some alternate multi-valued properties + Content contentA = contentManager.get(MV.pathA); + contentA.setProperty(MV.propKey, (Object) MV.altMultiValueA); + contentManager.update(contentA); + Content contentX = contentManager.get(MV.pathB); + contentX.setProperty(MV.propKey, (Object) MV.altMultiValueB); + contentManager.update(contentX); + + // verify state of content + contentA = contentManager.get(MV.pathA); + contentX = contentManager.get(MV.pathB); + assertEquals(MV.pathA, contentA.getPath()); + assertEquals(MV.pathB, contentX.getPath()); + Map 
propsA = contentA.getProperties(); + Map propsX = contentX.getProperties(); + Assert + .assertTrue(Arrays.equals(MV.altMultiValueA, (String[]) propsA.get(MV.propKey))); + Assert + .assertTrue(Arrays.equals(MV.altMultiValueB, (String[]) propsX.get(MV.propKey))); + return contentManager; + } + + private static class MV { + private static final String propKey = "sakai:category"; + private static final String pathA = "/multi/pathA"; + private static final String pathB = "/multi/pathB"; + private static final String[] multiValueA = new String[] { "valueA", "valueB" }; + private static final String[] multiValueB = new String[] { "valueX", "valueY", + "valueZ" }; + private static final String[] altMultiValueA = multiValueB; + private static final String[] altMultiValueB = multiValueA; + } + + @Test + public void testFindAfterChangingPropertyValue() throws Exception { + + String oldValue = "testFindAfterChangingPropertyValue-val1-"+System.currentTimeMillis(); + String newValue = "testFindAfterChangingPropertyValue-newval-"+System.currentTimeMillis(); + + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), principalValidatorResolver); + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + + StorageClientUtils.deleteTree(contentManager, "/testFindAfterChangingPropertyValue"); + + // create content + contentManager.update(new Content("/testFindAfterChangingPropertyValue", ImmutableMap.of("sakai:marker", (Object) oldValue))); + + // after initial creation, prop1 should be "val1" + Iterable results = contentManager.find(ImmutableMap.of("sakai:marker", (Object) oldValue)); + Iterator resultsIterator = results.iterator(); + 
Assert.assertTrue(resultsIterator.hasNext()); + Content found = resultsIterator.next(); + Assert.assertEquals("/testFindAfterChangingPropertyValue", found.getPath()); + Assert.assertEquals(oldValue, found.getProperty("sakai:marker")); + + // now change prop1 + found.setProperty("sakai:marker", newValue); + contentManager.update(found); + + // calling get() shows prop1 has been updated + Content gotten = contentManager.get("/testFindAfterChangingPropertyValue"); + Assert.assertEquals(newValue, gotten.getProperty("sakai:marker")); + + // ok, now see if we can find the object searching on "newval" + Iterable findOfNewVal = contentManager.find(ImmutableMap.of("sakai:marker", (Object) newValue)); + Content foundAfterUpdate = findOfNewVal.iterator().next(); + Assert.assertEquals(newValue, foundAfterUpdate.getProperty("sakai:marker")); + + // find on the old val should return an empty iterator + Iterable findOfOldval = contentManager.find(ImmutableMap.of("sakai:marker", (Object) oldValue)); + // if find() is correct this line should pass + Assert.assertFalse(findOfOldval.iterator().hasNext()); + } + + @Test + public void testCountTest() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), + principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + contentManager.update(new Content("/simpleFind", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue1"))); + contentManager.update(new Content("/simpleFind/item2", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue1"))); + contentManager.update(new 
Content("/simpleFind/test", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue3"))); + contentManager.update(new Content("/simpleFind/test/ing", ImmutableMap.of("sakai:marker", + (Object) "testSimpleFindvalue4"))); + + Assert.assertEquals(1, contentManager.count(ImmutableMap.of("sakai:marker", (Object) "testSimpleFindvalue4"))); + Assert.assertEquals(2, contentManager.count(ImmutableMap.of("sakai:marker", (Object) "testSimpleFindvalue1"))); + + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerTest.java new file mode 100644 index 00000000..4567c92d --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerTest.java @@ -0,0 +1,1198 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.content; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessControlManager; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.authorizable.AuthorizableManager; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import org.sakaiproject.nakamura.lite.BaseMemoryRepository; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.LoggingStorageListener; +import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; +import 
org.sakaiproject.nakamura.lite.accesscontrol.PrincipalValidatorResolverImpl; +import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; +import org.sakaiproject.nakamura.lite.storage.spi.ConcurrentLRUMap; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterators; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +public abstract class AbstractContentManagerTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(AbstractContentManagerTest.class); + private StorageClient client; + private ConfigurationImpl configuration; + private StorageClientPool clientPool; + private Map sharedCache = new ConcurrentLRUMap(1000); + private PrincipalValidatorResolver principalValidatorResolver = new PrincipalValidatorResolverImpl(); + + @Before + public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, + ClassNotFoundException, IOException { + + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration = new ConfigurationImpl(); + configuration.activate(properties); + clientPool = getClientPool(configuration); + client = clientPool.getClient(); + AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, + configuration); + authorizableActivator.setup(); + LOGGER.info("Setup Complete"); + } + + protected abstract StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException; + + @After + public void after() throws ClientPoolException { + client.close(); + } + + @Test + public void testCreateContent() throws StorageClientException, 
AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + contentManager.update(new Content("/testCreateContent", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testCreateContent/test", ImmutableMap.of("prop1", (Object) "value2"))); + contentManager + .update(new Content("/testCreateContent/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); + + Content content = contentManager.get("/testCreateContent"); + Assert.assertEquals("/testCreateContent", content.getPath()); + Map p = content.getProperties(); + LOGGER.info("Properties is {}",p); + Assert.assertEquals("value1", (String)p.get("prop1")); + Iterator children = content.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + Content child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testCreateContent/test", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value2", (String)p.get("prop1")); + children = child.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testCreateContent/test/ing", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value3", (String)p.get("prop1")); + + } + + + @Test + public void testCreateContentWithNewChild() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User 
currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + String testId = "/testCreateContentWithNewChild"+System.nanoTime(); + contentManager.update(new Content(testId, ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content(testId+"/test", ImmutableMap.of("prop1", (Object) "value2"))); + contentManager + .update(new Content(testId+"/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); + + Content content = contentManager.get(testId); + Assert.assertEquals(testId, content.getPath()); + Map p = content.getProperties(); + LOGGER.info("Properties is {}",p); + Assert.assertEquals("value1", (String)p.get("prop1")); + Iterator children = content.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + Content child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals(testId+"/test", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value2", (String)p.get("prop1")); + children = child.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals(testId+"/test/ing", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value3", (String)p.get("prop1")); + + Content testChild = contentManager.get(testId+"/test"); + Set childPaths = Sets.newHashSet(); + children = testChild.listChildren().iterator(); + while ( children.hasNext() ) { + child = children.next(); + Assert.assertNotNull(child); + childPaths.add(child.getPath()); + } + Assert.assertEquals(1, childPaths.size()); + 
Assert.assertTrue(childPaths.contains(testId+"/test/ing")); + childPaths.clear(); + + contentManager + .update(new Content(testId+"/test/newchild", ImmutableMap.of("prop1", (Object) "value3"))); + children = testChild.listChildren().iterator(); + while ( children.hasNext() ) { + child = children.next(); + Assert.assertNotNull(child); + childPaths.add(child.getPath()); + } + Assert.assertEquals(2, childPaths.size()); + Assert.assertTrue(childPaths.contains(testId+"/test/newchild")); + Assert.assertTrue(childPaths.contains(testId+"/test/ing")); + + + + } + + @Test + public void testCreateContent2() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + contentManager.update(new Content("newRootTestCreateContent", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("newRootTestCreateContent/test", ImmutableMap.of("prop1", (Object) "value2"))); + contentManager + .update(new Content("newRootTestCreateContent/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); + + Content content = contentManager.get("newRootTestCreateContent"); + Assert.assertEquals("newRootTestCreateContent", content.getPath()); + Map p = content.getProperties(); + LOGGER.info("Properties is {}",p); + Assert.assertEquals("value1", (String)p.get("prop1")); + Iterator children = content.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + Content child = children.next(); + Assert.assertFalse(children.hasNext()); + 
Assert.assertEquals("newRootTestCreateContent/test", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value2", (String)p.get("prop1")); + children = child.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("newRootTestCreateContent/test/ing", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value3", (String)p.get("prop1")); + + } + + @Test + public void testConentTree() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + contentManager.update(new Content("testConentTree/1/11/111", ImmutableMap.of("prop111", + (Object) "value111"))); + contentManager.update(new Content("testConentTree/1/11/333", ImmutableMap.of("prop333", + (Object) "value333"))); + contentManager.update(new Content("testConentTree/1/11/222", ImmutableMap.of("prop222", + (Object) "value222"))); + + contentManager.update(new Content("testConentTree/1/22/444", ImmutableMap.of("prop444", + (Object) "value444"))); + contentManager.update(new Content("testConentTree/1/22/555", ImmutableMap.of("prop555", + (Object) "value555"))); + contentManager.update(new Content("testConentTree/1/22/666", ImmutableMap.of("prop666", + (Object) "value666"))); + contentManager.update(new Content("testConentTree/1/22/777", ImmutableMap.of("prop777", + (Object) "value777"))); + Content content11 = contentManager.get("testConentTree/1/11"); + Set childSet = 
Sets.newHashSet(); + int i = 0; + for ( String c : content11.listChildPaths()){ + i++; + childSet.add(c); + } + Assert.assertEquals(i,childSet.size()); + Assert.assertEquals(i,3); + Assert.assertTrue(childSet.contains("testConentTree/1/11/111")); + Assert.assertTrue(childSet.contains("testConentTree/1/11/222")); + Assert.assertTrue(childSet.contains("testConentTree/1/11/333")); + + content11 = contentManager.get("testConentTree/1/22"); + childSet.clear(); + i = 0; + for ( String c : content11.listChildPaths()){ + i++; + childSet.add(c); + } + Assert.assertEquals(i,childSet.size()); + Assert.assertEquals(i,4); + Assert.assertTrue(childSet.contains("testConentTree/1/22/444")); + Assert.assertTrue(childSet.contains("testConentTree/1/22/555")); + Assert.assertTrue(childSet.contains("testConentTree/1/22/666")); + Assert.assertTrue(childSet.contains("testConentTree/1/22/777")); + + content11 = contentManager.get("testConentTree/1"); + childSet.clear(); + i = 0; + for ( String c : content11.listChildPaths()){ + i++; + childSet.add(c); + } + Assert.assertEquals(i,childSet.size()); + Assert.assertEquals(i,2); + Assert.assertTrue(childSet.contains("testConentTree/1/11")); + Assert.assertTrue(childSet.contains("testConentTree/1/22")); + + + } + + + @Test + public void testCopySimple() throws StorageClientException, AccessDeniedException, IOException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + contentManager.update(new Content("testCopySimple/source/thefile", ImmutableMap.of("prop", + (Object) "source"))); + 
contentManager.update(new Content("testCopySimple/destination", ImmutableMap.of("prop", + (Object) "dest"))); + + contentManager.copy("testCopySimple/source/thefile", "testCopySimple/destination/target", false); + Content check = contentManager.get("testCopySimple/source/thefile"); + Assert.assertEquals("testCopySimple/source/thefile", check.getPath()); + Assert.assertEquals("source", check.getProperty("prop")); + Set checkChildren = Sets.newHashSet(); + int countChildren = 0; + for ( String child : check.listChildPaths()) { + countChildren++; + checkChildren.add(child); + } + Assert.assertEquals(0, countChildren); + Assert.assertEquals(0, checkChildren.size()); + + check = contentManager.get("testCopySimple/destination"); + Assert.assertEquals("testCopySimple/destination", check.getPath()); + Assert.assertEquals("dest", check.getProperty("prop")); + + checkChildren = Sets.newHashSet(); + countChildren = 0; + for ( String child : check.listChildPaths()) { + countChildren++; + checkChildren.add(child); + } + Assert.assertEquals(1, countChildren); + Assert.assertEquals(1, checkChildren.size()); + Assert.assertTrue(checkChildren.contains("testCopySimple/destination/target")); + + check = contentManager.get("testCopySimple/destination/target"); + Assert.assertNotNull(check); + Assert.assertEquals("testCopySimple/destination/target", check.getPath()); + Assert.assertEquals("source", check.getProperty("prop")); + + checkChildren = Sets.newHashSet(); + countChildren = 0; + for ( String child : check.listChildPaths()) { + countChildren++; + checkChildren.add(child); + } + Assert.assertEquals(0, countChildren); + Assert.assertEquals(0, checkChildren.size()); + } + + @Test + public void testCopyOverwrite() throws StorageClientException, AccessDeniedException, IOException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl 
accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + contentManager.update(new Content("testCopyOverwrite/source/thefile", ImmutableMap.of("prop", + (Object) "source"))); + contentManager.update(new Content("testCopyOverwrite/destination/target", ImmutableMap.of("prop", + (Object) "dest"))); + + contentManager.copy("testCopyOverwrite/source/thefile", "testCopyOverwrite/destination/target", false); + Content check = contentManager.get("testCopyOverwrite/source/thefile"); + Assert.assertEquals("testCopyOverwrite/source/thefile", check.getPath()); + Assert.assertEquals("source", check.getProperty("prop")); + Set checkChildren = Sets.newHashSet(); + int countChildren = 0; + for ( String child : check.listChildPaths()) { + countChildren++; + checkChildren.add(child); + } + Assert.assertEquals(0, countChildren); + Assert.assertEquals(0, checkChildren.size()); + + check = contentManager.get("testCopyOverwrite/destination"); + Assert.assertEquals("testCopyOverwrite/destination", check.getPath()); + Assert.assertNull(check.getProperty("prop")); + + checkChildren = Sets.newHashSet(); + countChildren = 0; + for ( String child : check.listChildPaths()) { + countChildren++; + checkChildren.add(child); + } + Assert.assertEquals(1, countChildren); + Assert.assertEquals(1, checkChildren.size()); + Assert.assertTrue(checkChildren.contains("testCopyOverwrite/destination/target")); + + check = contentManager.get("testCopyOverwrite/destination/target"); + Assert.assertNotNull(check); + Assert.assertEquals("testCopyOverwrite/destination/target", check.getPath()); + Assert.assertEquals("source", check.getProperty("prop")); + + checkChildren = Sets.newHashSet(); + countChildren = 0; + for ( String child : 
check.listChildPaths()) { + countChildren++; + checkChildren.add(child); + } + Assert.assertEquals(0, countChildren); + Assert.assertEquals(0, checkChildren.size()); + } + + + + @Test + public void testSimpleDelete() throws AccessDeniedException, StorageClientException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + String path = "/testSimpleDelete/test2/test3/test4"; + String parentPath = "/testSimpleDelete/test2/test3"; + contentManager.update(new Content(parentPath, ImmutableMap.of("propParent", (Object) "valueParent"))); + contentManager.update(new Content(path, ImmutableMap.of("prop1", (Object) "value1"))); + Content content = contentManager.get(path); + Assert.assertNotNull(content); + Assert.assertEquals("value1", content.getProperty("prop1")); + + contentManager.delete(path); + Assert.assertNull(contentManager.get(path)); + content = contentManager.get(parentPath); + Assert.assertNotNull(content); + Assert.assertEquals("valueParent", content.getProperty("propParent")); + + contentManager.delete("/testSimpleDelete", true); + + } + + @Test + public void testSimpleDeleteRoot() throws AccessDeniedException, StorageClientException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + 
ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + String path = "testSimpleDeleteRoot/test2/test3/test4"; + String parentPath = "testSimpleDeleteRoot/test2/test3"; + contentManager.update(new Content(parentPath, ImmutableMap.of("propParent", (Object) "valueParent"))); + contentManager.update(new Content(path, ImmutableMap.of("prop1", (Object) "value1"))); + Content content = contentManager.get(path); + Assert.assertNotNull(content); + Assert.assertEquals("value1", content.getProperty("prop1")); + + contentManager.delete(path); + Assert.assertNull(contentManager.get(path)); + content = contentManager.get(parentPath); + Assert.assertNotNull(content); + Assert.assertEquals("valueParent", content.getProperty("propParent")); + + contentManager.delete("testSimpleDeleteRoot", true); + + } + + @Test + public void testDeleteContent() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + contentManager.update(new Content("/testDeleteContent", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testDeleteContent/test", ImmutableMap.of("prop1", (Object) "value2"))); + contentManager + .update(new Content("/testDeleteContent/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); + for (int i = 0; i < 5; i++) { + for (int j = 0; j < 5; j++) { + for (int k = 0; k < 5; k++) { + contentManager.update(new 
Content("/testDeleteContent/test/ing/" + i + "/" + j + "/" + k, + ImmutableMap.of("prop1", (Object) "value3"))); + } + } + } + + Content content = contentManager.get("/testDeleteContent"); + Assert.assertEquals("/testDeleteContent", content.getPath()); + Map p = content.getProperties(); + Assert.assertEquals("value1", (String)p.get("prop1")); + Iterator children = content.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + Content child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testDeleteContent/test", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value2", (String)p.get("prop1")); + children = child.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testDeleteContent/test/ing", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value3", (String)p.get("prop1")); + + StorageClientUtils.deleteTree(contentManager, "/testDeleteContent/test/ing"); + content = contentManager.get("/testDeleteContent/test/ing"); + Assert.assertNull(content); + for (int i = 0; i < 5; i++) { + for (int j = 0; j < 5; j++) { + for (int k = 0; k < 5; k++) { + Assert.assertNull(contentManager.get("/testDeleteContent/test/ing/" + i + "/" + j + "/" + k)); + } + } + } + + } + + @Test + public void testDeleteContentDeletesPathConsistently() throws StorageClientException, AccessDeniedException + { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new 
LoggingStorageListener()); + contentManager.update(new Content("/testDeleteContent", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testDeleteContent/test", ImmutableMap.of("prop1", (Object) "value2"))); + + contentManager.delete("/testDeleteContent/test"); + + Assert.assertNull(contentManager.get("/testDeleteContent/test")); + + Content parent = contentManager.get("/testDeleteContent"); + + Iterator + children = null; + Iterator + childPaths = null; + + children = parent.listChildren().iterator(); + childPaths = parent.listChildPaths().iterator(); + + Assert.assertFalse(children.hasNext()); + Assert.assertFalse(childPaths.hasNext()); + + children = contentManager.listChildren("/testDeleteContent"); + childPaths = contentManager.listChildPaths("/testDeleteContent"); + + Assert.assertFalse(children.hasNext()); + Assert.assertFalse(childPaths.hasNext()); + } + + @Test + public void testUpdateContent() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + StorageClientUtils.deleteTree(contentManager, "/testUpdateContent"); + contentManager.update(new Content("/testUpdateContent", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testUpdateContent/test", ImmutableMap.of("prop1", (Object) "value2"))); + contentManager + .update(new Content("/testUpdateContent/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); + + Content content = contentManager.get("/testUpdateContent"); + 
Assert.assertEquals("/testUpdateContent", content.getPath()); + Map p = content.getProperties(); + Assert.assertEquals("value1", (String)p.get("prop1")); + Iterator children = content.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + Content child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testUpdateContent/test", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value2", (String)p.get("prop1")); + children = child.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testUpdateContent/test/ing", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value3", (String)p.get("prop1")); + + p = content.getProperties(); + Assert.assertNull((String)p.get("prop1update")); + + content.setProperty("prop1update", "value4"); + contentManager.update(content); + + content = contentManager.get(content.getPath()); + p = content.getProperties(); + Assert.assertEquals("value4", (String)p.get("prop1update")); + + } + + @Test + public void testVersionContent() throws StorageClientException, AccessDeniedException, + InterruptedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + StorageClientUtils.deleteTree(contentManager, "/testVersionContent"); + contentManager.update(new Content("/testVersionContent", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testVersionContent/test", 
ImmutableMap.of("prop1", (Object) "value2"))); + contentManager + .update(new Content("/testVersionContent/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); + + Content content = contentManager.get("/testVersionContent"); + Assert.assertEquals("/testVersionContent", content.getPath()); + Map p = content.getProperties(); + Assert.assertEquals("value1", (String)p.get("prop1")); + Iterator children = content.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + Content child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testVersionContent/test", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value2", (String)p.get("prop1")); + children = child.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testVersionContent/test/ing", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value3", (String)p.get("prop1")); + + p = content.getProperties(); + Assert.assertNull((String)p.get("prop1update")); + + // FIXME: add some version list methods, we have no way of testing if + // this works. + String versionName = contentManager.saveVersion("/testVersionContent"); + + // must reload after a version save. 
+ content = contentManager.get("/testVersionContent"); + + content.setProperty("prop1update", "value4"); + contentManager.update(content); + + content = contentManager.get("/testVersionContent"); + p = content.getProperties(); + Assert.assertEquals("value4", (String)p.get("prop1update")); + + // just in case the machine is so fast all of that took 1ms + Thread.sleep(50); + + String versionName2 = contentManager.saveVersion("/testVersionContent"); + + Content versionContent = contentManager.getVersion("/testVersionContent", versionName); + Assert.assertNotNull(versionContent); + Content versionContent2 = contentManager.getVersion("/testVersionContent", versionName2); + Assert.assertNotNull(versionContent2); + List versionList = contentManager.getVersionHistory("/testVersionContent"); + Assert.assertNotNull(versionList); + Assert.assertArrayEquals("Version List is " + Arrays.toString(versionList.toArray()) + + " expecting " + versionName2 + " then " + versionName, new String[] { + versionName2, versionName }, versionList.toArray(new String[versionList.size()])); + + Content badVersionContent = contentManager.getVersion("/testVersionContent", "BadVersion"); + Assert.assertNull(badVersionContent); + + // get version with metadata + Map metadata = ImmutableMap.of("testmd1", "yaymd1", "testmd2", 100); + String metaVersionName = contentManager.saveVersion("/testVersionContent", metadata); + Content metaVersion = contentManager.getVersion("/testVersionContent", metaVersionName); + Assert.assertNotNull(metaVersion); + Assert.assertEquals("yaymd1", metaVersion.getProperty("metadata:testmd1")); + Assert.assertEquals(100, metaVersion.getProperty("metadata:testmd2")); + } + + @Test + public void testUploadContent() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl 
accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, sharedCache, new LoggingStorageListener()); + StorageClientUtils.deleteTree(contentManager, "/testUploadContent"); + contentManager.update(new Content("/testUploadContent", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testUploadContent/test", ImmutableMap.of("prop1", (Object) "value2"))); + contentManager + .update(new Content("/testUploadContent/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); + + Content content = contentManager.get("/testUploadContent"); + Assert.assertEquals("/testUploadContent", content.getPath()); + Map p = content.getProperties(); + Assert.assertEquals("value1", (String)p.get("prop1")); + Iterator children = content.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + Content child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testUploadContent/test", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value2", (String)p.get("prop1")); + children = child.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testUploadContent/test/ing", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value3", (String)p.get("prop1")); + + p = content.getProperties(); + Assert.assertNull((String)p.get("prop1update")); + + // FIXME: add some version list methods, we have no way of testing if + // this works. 
+ contentManager.saveVersion("/testUploadContent"); + + content = contentManager.get("/testUploadContent"); + + content.setProperty("prop1update", "value4"); + contentManager.update(content); + + content = contentManager.get(content.getPath()); + p = content.getProperties(); + Assert.assertEquals("value4", (String)p.get("prop1update")); + + final byte[] b = new byte[20 * 1024 * 1024 + 1231]; + Random r = new Random(); + r.nextBytes(b); + try { + contentManager.update(new Content("/testUploadContent/test/ing/testfile.txt", ImmutableMap.of( + "testproperty", (Object) "testvalue"))); + long su = System.currentTimeMillis(); + ByteArrayInputStream bais = new ByteArrayInputStream(b); + contentManager.writeBody("/testUploadContent/test/ing/testfile.txt", bais); + bais.close(); + long eu = System.currentTimeMillis(); + + InputStream read = contentManager.getInputStream("/testUploadContent/test/ing/testfile.txt"); + + int i = 0; + byte[] buffer = new byte[8192]; + int j = read.read(buffer); + Assert.assertNotSame(-1, j); + while (j != -1) { + // Assert.assertEquals((int)b[i] & 0xff, j); + i = i + j; + j = read.read(buffer); + } + read.close(); + Assert.assertEquals(b.length, i); + long ee = System.currentTimeMillis(); + LOGGER.info("Write rate {} MB/s Read Rate {} MB/s ", + (1000 * (double) b.length / (1024 * 1024 * (double) (eu - su))), + (1000 * (double) b.length / (1024 * 1024 * (double) (ee - eu)))); + + // Update content and re-read + r.nextBytes(b); + bais = new ByteArrayInputStream(b); + contentManager.writeBody("/testUploadContent/test/ing/testfile.txt", bais); + + read = contentManager.getInputStream("/testUploadContent/test/ing/testfile.txt"); + + i = 0; + j = read.read(buffer); + Assert.assertNotSame(-1, j); + while (j != -1) { + for (int k = 0; k < j; k++) { + Assert.assertEquals(b[i], buffer[k]); + i++; + } + if ((i % 100 == 0) && (i < b.length - 20)) { + Assert.assertEquals(10, read.skip(10)); + i += 10; + } + j = read.read(buffer); + } + read.close(); + 
Assert.assertEquals(b.length, i); + + } catch (IOException e) { + + // TODO Auto-generated catch block + e.printStackTrace(); + Assert.fail(); + } + + } + + @Test + public void testMoveWithChildren() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, + accessControlManager, configuration, sharedCache, new LoggingStorageListener()); + contentManager.update(new Content("/testMoveWithChildren", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testMoveWithChildren/movewc", ImmutableMap.of("prop1", + (Object) "value2"))); + contentManager.update(new Content("/testMoveWithChildren/test", ImmutableMap + .of("prop1", (Object) "value3"))); + contentManager.update(new Content("/testMoveWithChildren/test/ing", ImmutableMap.of("prop1", + (Object) "value4"))); + StorageClientUtils.deleteTree(contentManager, "/testMoveWithChildren/movewc/test"); + contentManager.move("/testMoveWithChildren/test", "/testMoveWithChildren/movewc/test"); + + Content content = contentManager.get("/testMoveWithChildren"); + Assert.assertEquals("/testMoveWithChildren", content.getPath()); + Map p = content.getProperties(); + LOGGER.info("Properties is {}", p); + Assert.assertEquals("value1", (String) p.get("prop1")); + Iterator children = content.listChildren().iterator(); + Assert.assertTrue(children.hasNext()); + Content child = children.next(); + Assert.assertFalse(children.hasNext()); + Assert.assertEquals("/testMoveWithChildren/movewc", child.getPath()); + p = child.getProperties(); + Assert.assertEquals("value2", (String) 
p.get("prop1"));
+        children = child.listChildren().iterator();
+        Assert.assertTrue(children.hasNext());
+        child = children.next();
+        Assert.assertFalse(children.hasNext());
+        Assert.assertEquals("/testMoveWithChildren/movewc/test", child.getPath());
+        p = child.getProperties();
+        Assert.assertEquals("value3", (String) p.get("prop1"));
+        children = child.listChildren().iterator();
+        Assert.assertTrue(children.hasNext());
+        child = children.next();
+        Assert.assertFalse(children.hasNext());
+        Assert.assertEquals("/testMoveWithChildren/movewc/test/ing", child.getPath());
+        p = child.getProperties();
+        Assert.assertEquals("value4", (String) p.get("prop1"));
+
+    }
+
+    @Test
+    public void testMoveWithForce() throws Exception {
+        AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null);
+        User currentUser = AuthenticatorImpl.authenticate("admin", "admin");
+
+        AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client,
+                currentUser, configuration, null, new LoggingStorageListener(), principalValidatorResolver);
+
+        ContentManagerImpl contentManager = new ContentManagerImpl(client,
+                accessControlManager, configuration, null, new LoggingStorageListener());
+        contentManager.update(new Content("/testMove", ImmutableMap.of("prop1", "value1")));
+        contentManager.update(new Content("/testMoveWithForce", ImmutableMap.of("prop1", "value2")));
+
+        try {
+            contentManager.move("/testMove", "/testMoveWithForce", false);
+            Assert.fail("Should throw an exception when trying to move without force to an existing path.");
+        } catch (StorageClientException e) {
+            // expected
+        }
+
+        contentManager.move("/testMove", "/testMoveWithForce", true);
+    }
+
+    @Test
+    public void testMoveWithAcls() throws Exception {
+        Repository repository = (Repository) new BaseMemoryRepository().getRepository();
+        Session adminSession = repository.loginAdministrative();
+        AuthorizableManager adminAuthorizableManager = adminSession.getAuthorizableManager();
+        
ContentManager adminContentManager = adminSession.getContentManager(); + AccessControlManager adminAccessControlManager = adminSession.getAccessControlManager(); + + adminContentManager.update(new Content("/testMove1", ImmutableMap.of("prop1", "value1"))); + + // create a test user and some test permissions + String u1 = "user1-" + System.currentTimeMillis(); + adminAuthorizableManager.createUser(u1, u1, u1, null); + Authorizable user1 = adminAuthorizableManager.findAuthorizable(u1); + + // setup an acl on the target + AclModification user1canWrite = new AclModification(AclModification.grantKey(u1), + Permissions.CAN_WRITE.getPermission(), AclModification.Operation.OP_OR); + adminAccessControlManager.setAcl(Security.ZONE_CONTENT, "/testMove1", new AclModification[] { user1canWrite }); + + // verify we can write to the old location + Assert.assertTrue(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, "/testMove1", Permissions.CAN_WRITE)); + + // move the content + adminContentManager.move("/testMove1", "/testMove2", false); + + // verify that the ACL moved with the content + Assert.assertTrue(adminAccessControlManager.can(user1, Security.ZONE_CONTENT, "/testMove2", Permissions.CAN_WRITE)); + } + + @Test + public void testMoveWithVersions() throws Exception { + Repository repository = (Repository) new BaseMemoryRepository().getRepository(); + Session adminSession = repository.loginAdministrative(); + ContentManager adminContentManager = adminSession.getContentManager(); + + String from = "testMove1"; + String to = "testMove2"; + + // add some initial content + adminContentManager.update(new Content(from, ImmutableMap.of("prop1", "value1"))); + + // save a version of the content and verify the history + adminContentManager.saveVersion(from); + List history = adminContentManager.getVersionHistory(from); + Assert.assertEquals(1, history.size()); + + // move the content + adminContentManager.move(from, to); + + // check the base content is there + 
Assert.assertTrue(adminContentManager.exists(to)); + + // check the history + history = adminContentManager.getVersionHistory(to); + Assert.assertEquals(1, history.size()); + } + + @Test + public void testMoveWithDestinationHistory() throws Exception { + Repository repository = (Repository) new BaseMemoryRepository().getRepository(); + Session adminSession = repository.loginAdministrative(); + ContentManager adminContentManager = adminSession.getContentManager(); + + String from = "testMove1"; + String to = "testMove2"; + + // add some initial content + adminContentManager.update(new Content(from, ImmutableMap.of("prop1", "value1"))); + adminContentManager.update(new Content(to, ImmutableMap.of("prop2", "value2"))); + + // save a version of the content and verify the history + adminContentManager.saveVersion(from); + List history = adminContentManager.getVersionHistory(from); + Assert.assertEquals(1, history.size()); + + adminContentManager.saveVersion(to); + adminContentManager.saveVersion(to); + adminContentManager.saveVersion(to); + history = adminContentManager.getVersionHistory(to); + Assert.assertEquals(3, history.size()); + + // move the content + adminContentManager.move(from, to, true); + + // check the base content is there + Assert.assertFalse(adminContentManager.exists(from)); + Assert.assertTrue(adminContentManager.exists(to)); + + // ensure we don't have properties from the previous content version + Content movedTo = adminContentManager.get(to); + Assert.assertNull(movedTo.getProperty("prop2")); + Assert.assertEquals("value1", movedTo.getProperty("prop1")); + + // check the history + history = adminContentManager.getVersionHistory(to); + Assert.assertEquals(3, history.size()); + } + + @Test + public void testMoveWithoutDestinationHistory() throws Exception { + Repository repository = (Repository) new BaseMemoryRepository().getRepository(); + Session adminSession = repository.loginAdministrative(); + ContentManager adminContentManager = 
adminSession.getContentManager(); + + String from = "testMove1"; + String to = "testMove2"; + + // add some initial content + adminContentManager.update(new Content(from, ImmutableMap.of("prop1", "value1"))); + adminContentManager.update(new Content(to, ImmutableMap.of("prop2", "value2"))); + + // save a version of the content and verify the history + adminContentManager.saveVersion(from); + List history = adminContentManager.getVersionHistory(from); + Assert.assertEquals(1, history.size()); + + adminContentManager.saveVersion(to); + adminContentManager.saveVersion(to); + adminContentManager.saveVersion(to); + history = adminContentManager.getVersionHistory(to); + Assert.assertEquals(3, history.size()); + + // move the content + adminContentManager.move(from, to, true, false); + + // check the base content is there + Assert.assertTrue(adminContentManager.exists(to)); + + // ensure we don't have properties from the previous content version + Content movedTo = adminContentManager.get(to); + Assert.assertNull(movedTo.getProperty("prop2")); + Assert.assertEquals("value1", movedTo.getProperty("prop1")); + + // check the history + history = adminContentManager.getVersionHistory(to); + Assert.assertEquals(1, history.size()); + } + + @Test + public void testMoveThenVersionAtDepth() throws Exception { + Repository repository = (Repository) new BaseMemoryRepository().getRepository(); + Session adminSession = repository.loginAdministrative(); + ContentManager adminContentManager = adminSession.getContentManager(); + + String from = "testMove1"; + String tmp = "tmp"; + String to = from + "/" + tmp; + + // add some initial content + adminContentManager.update(new Content(from, ImmutableMap.of("prop1", "value1"))); + adminContentManager.update(new Content(tmp, ImmutableMap.of("prop1", "value1"))); + + // save a version of the content and verify the history + adminContentManager.saveVersion(from); + List history = adminContentManager.getVersionHistory(from); + 
Assert.assertEquals(1, history.size()); + + adminContentManager.saveVersion(tmp); + history = adminContentManager.getVersionHistory(tmp); + Assert.assertEquals(1, history.size()); + + // move the content + adminContentManager.move(tmp, to); + + // check the base content is there + Assert.assertTrue(adminContentManager.exists(to)); + + // check that all history is still there + history = adminContentManager.getVersionHistory(from); + Assert.assertEquals(1, history.size()); + history = adminContentManager.getVersionHistory(to); + Assert.assertEquals(1, history.size()); + + // verify that we can add more history at each node + adminContentManager.saveVersion(from); + history = adminContentManager.getVersionHistory(from); + Assert.assertEquals(2, history.size()); + + adminContentManager.saveVersion(to); + history = adminContentManager.getVersionHistory(to); + Assert.assertEquals(2, history.size()); + } + + @Test + public void testCanReuseAContentPath() throws Exception { + String path = "/pathToReuse" + System.currentTimeMillis(); + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, + accessControlManager, configuration, sharedCache, new LoggingStorageListener()); + contentManager.update(new Content(path, ImmutableMap.of("prop1", (Object) "value1", "prop2", "valueProp2"))); + Content content = contentManager.get(path); + Assert.assertEquals("This property should have been updated.", content.getProperty("prop1"), "value1"); + Assert.assertEquals("This property should have been updated.", content.getProperty("prop2"), "valueProp2"); + contentManager.delete(path); + content = 
contentManager.get(path); + Assert.assertNull(content); + contentManager.update(new Content(path, ImmutableMap.of("prop1", (Object) "value2"))); + content = contentManager.get(path); + Assert.assertNotNull(content); + Assert.assertEquals("This property should have been updated.", content.getProperty("prop1"), "value2"); + Assert.assertFalse("This property should have been updated.", content.hasProperty("prop2")); + } + + @Test + public void testListChildren() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, + accessControlManager, configuration, null, new LoggingStorageListener()); + + Iterator children = contentManager.listChildren("/testListChildrenDoesNotExist"); + Assert.assertEquals(0, Iterators.size(children)); + + StorageClientUtils.deleteTree(contentManager, "/testListChildren"); + contentManager.update(new Content("/testListChildren", ImmutableMap.of("prop1", (Object) "parent"))); + children = contentManager.listChildren("/testListChildren"); + Assert.assertEquals(0, Iterators.size(children)); + + contentManager.update(new Content("/testListChildren/child1", ImmutableMap.of("someprop1", (Object) "value1"))); + contentManager.update(new Content("/testListChildren/child2", ImmutableMap.of("someprop1", (Object) "value2"))); + contentManager.update(new Content("/testListChildren/child3", ImmutableMap.of("someprop1",(Object) "value3"))); + contentManager.update(new Content("/youreNotMyDad/child4", ImmutableMap.of("someprop1",(Object) "value4"))); + + children = contentManager.listChildren("/testListChildren"); + int childCount = 0; + while 
(children.hasNext()){ + // Make sure we're getting back the children we saved + Assert.assertNotNull(children.next().getProperty("someprop1")); + childCount++; + } + Assert.assertEquals(3, childCount); + } + + // @Test This Test runs forever and tests for OOM on disposables. + public void testOOM() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, + configuration, null, new LoggingStorageListener()); + contentManager.update(new Content("/testCreateContent", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testCreateContent/test", ImmutableMap.of("prop1", (Object) "value2"))); + contentManager + .update(new Content("/testCreateContent/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); + + + Content obj = contentManager.get("/testCreateContent"); + Assert.assertNotNull(obj); + while (true) { + for (@SuppressWarnings("unused") Content child : obj.listChildren()) { + } + } + } + + + @Test + public void testTrigger() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, sharedCache); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, + accessControlManager, configuration, sharedCache, new LoggingStorageListener()); + 
contentManager.update(new Content("/testMoveWithChildren", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testMoveWithChildren/movewc", ImmutableMap.of("prop1", + (Object) "value2"))); + contentManager.update(new Content("/testMoveWithChildren/test", ImmutableMap + .of("prop1", (Object) "value3"))); + contentManager.update(new Content("/testMoveWithChildren/test/ing", ImmutableMap.of("prop1", + (Object) "value4"))); + contentManager.triggerRefresh("/testMoveWithChildren/test/ing"); + } + + @Test + public void testTriggerAll() throws StorageClientException, AccessDeniedException { + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, + currentUser, configuration, null, new LoggingStorageListener(), principalValidatorResolver); + + ContentManagerImpl contentManager = new ContentManagerImpl(client, + accessControlManager, configuration, null, new LoggingStorageListener()); + contentManager.update(new Content("/testMoveWithChildren", ImmutableMap.of("prop1", (Object) "value1"))); + contentManager.update(new Content("/testMoveWithChildren/movewc", ImmutableMap.of("prop1", + (Object) "value2"))); + contentManager.update(new Content("/testMoveWithChildren/test", ImmutableMap + .of("prop1", (Object) "value3"))); + contentManager.update(new Content("/testMoveWithChildren/test/ing", ImmutableMap.of("prop1", + (Object) "value4"))); + contentManager.triggerRefreshAll(); + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/content/InternalContentAccess.java b/core/src/test/java/org/sakaiproject/nakamura/lite/content/InternalContentAccess.java new file mode 100644 index 00000000..e40c23e1 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/content/InternalContentAccess.java @@ -0,0 +1,24 @@ +/* + * Licensed to 
the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.content; + +public class InternalContentAccess { + public static void resetInternalContent() { + InternalContent.idFieldIsSet = false; + } +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/JDBCStorageClientTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/JDBCStorageClientTest.java new file mode 100644 index 00000000..be8109c9 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/JDBCStorageClientTest.java @@ -0,0 +1,394 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc; + +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.text.MessageFormat; +import java.util.Map; +import java.util.Set; + +import com.google.common.collect.ImmutableList; +import junit.framework.Assert; + +import org.apache.commons.lang.StringUtils; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Answers; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; +import org.sakaiproject.nakamura.lite.storage.jdbc.JDBCStorageClient; +import org.sakaiproject.nakamura.lite.storage.jdbc.BaseJDBCStorageClientPool; + +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +/** + * + */ +@RunWith(MockitoJUnitRunner.class) +public class JDBCStorageClientTest { + JDBCStorageClient client; + + @Mock + BaseJDBCStorageClientPool connPool; + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + Connection conn; + + + @Mock + PreparedStatement ps2; + + + @Mock + ResultSet rs2; + + Map properties = Maps.newHashMap(); + Map sqlConfig = Maps.newHashMap(); + + @Before + public void setUp() throws Exception { + // have the pool return the connection we control + 
when(connPool.getConnection()).thenReturn(conn); + + // funnel in data when the indexed columns are looked up + when(conn.prepareStatement(anyString())).thenReturn(ps2); + when(ps2.executeQuery()).thenReturn(rs2); + + // give back some bogus db vendor data + when(conn.getMetaData().getDatabaseProductName()).thenReturn("Sakai Nakamura"); + when(conn.getMetaData().getDatabaseMajorVersion()).thenReturn(1); + when(conn.getMetaData().getDatabaseMinorVersion()).thenReturn(0); + + + sqlConfig = new BaseJDBCStorageClientPool().getSqlConfig(conn); + Set colnames = ImmutableSet.of("conjunctions:key1","conjunctions:key2","conjunctions:key3","conjunctions:key4", + "conjunctions:testKey1","conjunctions:testKey2","conjunctions:testKey3","conjunctions:testKey4"); + + client = new JDBCStorageClient(connPool, properties, sqlConfig, colnames, null, null, false); + } + + @Test + @SuppressWarnings(value="ODR_OPEN_DATABASE_RESOURCE", justification="Mock objects") + public void test2TermsAnd() throws Exception { + + String keySpace = "cn"; + String columnFamily = "conjunctions"; + Map props = Maps.newLinkedHashMap(); + props.put("key1", "val1"); + props.put("key2", "val2"); + client.find(keySpace, columnFamily, props, null); + + ArgumentCaptor sqlCaptor = ArgumentCaptor.forClass(String.class); + verify(conn, atLeastOnce()).prepareStatement(sqlCaptor.capture()); + verify(ps2).setObject(1, "key1"); + verify(ps2).setObject(2, "val1"); + verify(ps2).setObject(3, "key2"); + verify(ps2).setObject(4, "val2"); + + String sqlTemplate = (String) sqlConfig.get("block-find"); + String[] statementParts = StringUtils.split(sqlTemplate, ';'); + + StringBuilder tables = new StringBuilder().append( + MessageFormat.format(statementParts[1], "a0")).append( + MessageFormat.format(statementParts[1], "a1")); + StringBuilder where = new StringBuilder().append(" (") + .append(MessageFormat.format(statementParts[2], "a0")).append(") AND (") + .append(MessageFormat.format(statementParts[2], "a1")).append(") 
AND"); + + String expectedSql = MessageFormat.format(statementParts[0], tables.toString(), + where.toString()); + + String sql = sqlCaptor.getValue(); + Assert.assertEquals(expectedSql, sql); + } + + @Test + @SuppressWarnings(value="ODR_OPEN_DATABASE_RESOURCE", justification="Mock objects") + public void test2TermsOr() throws Exception { + + String keySpace = "cn"; + String columnFamily = "conjunctions"; + Map container = Maps.newHashMap(); + Map orSet = Maps.newLinkedHashMap(); + orSet.put("key1", "val1"); + orSet.put("key2", "val2"); + container.put("orSet", orSet); + client.find(keySpace, columnFamily, container, null); + + ArgumentCaptor sqlCaptor = ArgumentCaptor.forClass(String.class); + verify(conn, atLeastOnce()).prepareStatement(sqlCaptor.capture()); + verify(ps2).setObject(1, "key1"); + verify(ps2).setObject(2, "val1"); + verify(ps2).setObject(3, "key2"); + verify(ps2).setObject(4, "val2"); + + String sqlTemplate = (String) sqlConfig.get("block-find"); + String[] statementParts = StringUtils.split(sqlTemplate, ';'); + + StringBuilder tables = new StringBuilder().append( + MessageFormat.format(statementParts[1], "a0")).append( + MessageFormat.format(statementParts[1], "a1")); + StringBuilder where = new StringBuilder().append(" ( (") + .append(MessageFormat.format(statementParts[2], "a0")).append(") OR (") + .append(MessageFormat.format(statementParts[2], "a1")).append(")) AND"); + + String expectedSql = MessageFormat.format(statementParts[0], tables.toString(), + where.toString()); + + String sql = sqlCaptor.getValue(); + Assert.assertEquals(expectedSql, sql); + } + + @Test + @SuppressWarnings(value="ODR_OPEN_DATABASE_RESOURCE", justification="Mock objects") + public void test1TermAnd2TermsOr1TermAnd() throws Exception { + + String keySpace = "cn"; + String columnFamily = "conjunctions"; + Map orSet = Maps.newLinkedHashMap(); + orSet.put("key1", "val1"); + orSet.put("key2", "val2"); + + Map container = Maps.newLinkedHashMap(); + container.put("testKey1", 
"testVal1"); + container.put("orSet", orSet); + container.put("testKey2", "testVal2"); + + client.find(keySpace, columnFamily, container, null); + + ArgumentCaptor sqlCaptor = ArgumentCaptor.forClass(String.class); + verify(conn, atLeastOnce()).prepareStatement(sqlCaptor.capture()); + int i = 0; + verify(ps2).setObject(++i, "testKey1"); + verify(ps2).setObject(++i, "testVal1"); + verify(ps2).setObject(++i, "key1"); + verify(ps2).setObject(++i, "val1"); + verify(ps2).setObject(++i, "key2"); + verify(ps2).setObject(++i, "val2"); + verify(ps2).setObject(++i, "testKey2"); + verify(ps2).setObject(++i, "testVal2"); + + String sqlTemplate = (String) sqlConfig.get("block-find"); + String[] statementParts = StringUtils.split(sqlTemplate, ';'); + + StringBuilder tables = new StringBuilder() + .append(MessageFormat.format(statementParts[1], "a0")) + .append(MessageFormat.format(statementParts[1], "a1")) + .append(MessageFormat.format(statementParts[1], "a2")) + .append(MessageFormat.format(statementParts[1], "a3")); + StringBuilder where = new StringBuilder().append(" (") + .append(MessageFormat.format(statementParts[2], "a0")).append(") AND (") + .append(" (").append(MessageFormat.format(statementParts[2], "a1")) + .append(") OR (").append(MessageFormat.format(statementParts[2], "a2")) + .append(")) AND (").append(MessageFormat.format(statementParts[2], "a3")) + .append(") AND"); + + String expectedSql = MessageFormat.format(statementParts[0], tables.toString(), + where.toString()); + + String sql = sqlCaptor.getValue(); + Assert.assertEquals(expectedSql, sql); + } + + @Test + @SuppressWarnings(value="ODR_OPEN_DATABASE_RESOURCE", justification="Mock objects") + public void test2TermsAnd2TermsOr2TermsOr() throws Exception { + + String keySpace = "cn"; + String columnFamily = "conjunctions"; + Map orSet1 = Maps.newLinkedHashMap(); + orSet1.put("key1", "val1"); + orSet1.put("key2", "val2"); + Map orSet2 = Maps.newLinkedHashMap(); + orSet2.put("key3", "val3"); + 
orSet2.put("key4", "val4"); + + Map container = Maps.newLinkedHashMap(); + container.put("testKey1", "testVal1"); + container.put("testKey2", "testVal2"); + container.put("orSet1", orSet1); + container.put("orSet2", orSet2); + + client.find(keySpace, columnFamily, container, null); + + ArgumentCaptor sqlCaptor = ArgumentCaptor.forClass(String.class); + verify(conn, atLeastOnce()).prepareStatement(sqlCaptor.capture()); + int i = 0; + verify(ps2).setObject(++i, "testKey1"); + verify(ps2).setObject(++i, "testVal1"); + verify(ps2).setObject(++i, "testKey2"); + verify(ps2).setObject(++i, "testVal2"); + verify(ps2).setObject(++i, "key1"); + verify(ps2).setObject(++i, "val1"); + verify(ps2).setObject(++i, "key2"); + verify(ps2).setObject(++i, "val2"); + verify(ps2).setObject(++i, "key3"); + verify(ps2).setObject(++i, "val3"); + verify(ps2).setObject(++i, "key4"); + verify(ps2).setObject(++i, "val4"); + + String sqlTemplate = (String) sqlConfig.get("block-find"); + String[] statementParts = StringUtils.split(sqlTemplate, ';'); + + StringBuilder tables = new StringBuilder() + .append(MessageFormat.format(statementParts[1], "a0")) + .append(MessageFormat.format(statementParts[1], "a1")) + .append(MessageFormat.format(statementParts[1], "a2")) + .append(MessageFormat.format(statementParts[1], "a3")) + .append(MessageFormat.format(statementParts[1], "a4")) + .append(MessageFormat.format(statementParts[1], "a5")); + StringBuilder where = new StringBuilder().append(" (") + .append(MessageFormat.format(statementParts[2], "a0")).append(") AND (") + .append(MessageFormat.format(statementParts[2], "a1")).append(") AND") + .append(" ( (").append(MessageFormat.format(statementParts[2], "a2")) + .append(") OR (").append(MessageFormat.format(statementParts[2], "a3")) + .append(")) AND").append(" ( (") + .append(MessageFormat.format(statementParts[2], "a4")).append(") OR (") + .append(MessageFormat.format(statementParts[2], "a5")).append(")) AND"); + + String expectedSql = 
MessageFormat.format(statementParts[0], tables.toString(), + where.toString()); + + String sql = sqlCaptor.getValue(); + Assert.assertEquals(expectedSql, sql); + } + + @Test + @SuppressWarnings(value="ODR_OPEN_DATABASE_RESOURCE", justification="Mock objects") + public void test2Terms1Indexed() throws Exception { + + String keySpace = "cn"; + String columnFamily = "conjunctions"; + Map props = Maps.newLinkedHashMap(); + props.put("key1", "val1"); + props.put("key2not", "val2"); + client.find(keySpace, columnFamily, props, null); + + ArgumentCaptor sqlCaptor = ArgumentCaptor.forClass(String.class); + verify(conn, atLeastOnce()).prepareStatement(sqlCaptor.capture()); + verify(ps2).setObject(1, "key1"); + verify(ps2).setObject(2, "val1"); + + String sqlTemplate = (String) sqlConfig.get("block-find"); + String[] statementParts = StringUtils.split(sqlTemplate, ';'); + + StringBuilder tables = new StringBuilder().append(MessageFormat.format( + statementParts[1], "a0")); + StringBuilder where = new StringBuilder().append(" (").append( + MessageFormat.format(statementParts[2], "a0")).append(") AND"); + + String expectedSql = MessageFormat.format(statementParts[0], tables.toString(), + where.toString()); + + String sql = sqlCaptor.getValue(); + Assert.assertEquals(expectedSql, sql); + } + + @Test + @SuppressWarnings(value="ODR_OPEN_DATABASE_RESOURCE", justification="Mock objects") + public void test2TermsMultivalueAnd() throws Exception { + + String keySpace = "cn"; + String columnFamily = "conjunctions"; + Map props = Maps.newLinkedHashMap(); + props.put("key1", ImmutableList.of("val1", "val2")); + props.put("key2", "val2"); + client.find(keySpace, columnFamily, props, null); + + ArgumentCaptor sqlCaptor = ArgumentCaptor.forClass(String.class); + verify(conn, atLeastOnce()).prepareStatement(sqlCaptor.capture()); + int i = 0; + verify(ps2).setObject(++i, "key1"); + verify(ps2).setObject(++i, "val1"); + verify(ps2).setObject(++i, "key1"); + verify(ps2).setObject(++i, "val2"); 
+ verify(ps2).setObject(++i, "key2"); + verify(ps2).setObject(++i, "val2"); + + String sqlTemplate = (String) sqlConfig.get("block-find"); + String[] statementParts = StringUtils.split(sqlTemplate, ';'); + + StringBuilder tables = new StringBuilder().append( + MessageFormat.format(statementParts[1], "a0")).append( + MessageFormat.format(statementParts[1], "a1")).append( + MessageFormat.format(statementParts[1], "a2")); + StringBuilder where = new StringBuilder().append(" (") + .append(MessageFormat.format(statementParts[2], "a0")).append(") AND (") + .append(MessageFormat.format(statementParts[2], "a1")).append(") AND (") + .append(MessageFormat.format(statementParts[2], "a2")).append(") AND"); + + String expectedSql = MessageFormat.format(statementParts[0], tables.toString(), + where.toString()); + + String sql = sqlCaptor.getValue(); + Assert.assertEquals(expectedSql, sql); + } + + @Test + @SuppressWarnings(value="ODR_OPEN_DATABASE_RESOURCE", justification="Mock objects") + public void test2TermsMultivalueOr() throws Exception { + + String keySpace = "cn"; + String columnFamily = "conjunctions"; + Map container = Maps.newHashMap(); + Map orSet = Maps.newLinkedHashMap(); + orSet.put("key1", ImmutableList.of("val1", "val2")); + orSet.put("key2", "val2"); + container.put("orSet", orSet); + client.find(keySpace, columnFamily, container, null); + + ArgumentCaptor sqlCaptor = ArgumentCaptor.forClass(String.class); + verify(conn, atLeastOnce()).prepareStatement(sqlCaptor.capture()); + int i = 0; + verify(ps2).setObject(++i, "key1"); + verify(ps2).setObject(++i, "val1"); + verify(ps2).setObject(++i, "key1"); + verify(ps2).setObject(++i, "val2"); + verify(ps2).setObject(++i, "key2"); + verify(ps2).setObject(++i, "val2"); + + String sqlTemplate = (String) sqlConfig.get("block-find"); + String[] statementParts = StringUtils.split(sqlTemplate, ';'); + + StringBuilder tables = new StringBuilder().append( + MessageFormat.format(statementParts[1], "a0")).append( + 
MessageFormat.format(statementParts[1], "a1")); + StringBuilder where = new StringBuilder().append(" ( (") + .append(MessageFormat.format(statementParts[2], "a0")).append(") OR (") + .append(MessageFormat.format(statementParts[2], "a0")).append(") OR (") + .append(MessageFormat.format(statementParts[2], "a1")).append(")) AND"); + + String expectedSql = MessageFormat.format(statementParts[0], tables.toString(), + where.toString()); + + String sql = sqlCaptor.getValue(); + Assert.assertEquals(expectedSql, sql); + } +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AccessControlManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AccessControlManagerImplTest.java similarity index 78% rename from src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AccessControlManagerImplTest.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AccessControlManagerImplTest.java index cc90752f..cb2698d2 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AccessControlManagerImplTest.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AccessControlManagerImplTest.java @@ -17,14 +17,15 @@ */ package org.sakaiproject.nakamura.lite.jdbc.derby; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.accesscontrol.AbstractAccessControlManagerImplTest; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; public class AccessControlManagerImplTest extends AbstractAccessControlManagerImplTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { - return DerbySetup.getClientPool(); + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return DerbySetup.getClientPool(configuration); } } diff --git 
a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AuthorizableManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AuthorizableManagerImplTest.java similarity index 78% rename from src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AuthorizableManagerImplTest.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AuthorizableManagerImplTest.java index 15f6252f..7e388e18 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AuthorizableManagerImplTest.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/AuthorizableManagerImplTest.java @@ -17,14 +17,15 @@ */ package org.sakaiproject.nakamura.lite.jdbc.derby; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.authorizable.AbstractAuthorizableManagerImplTest; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; public class AuthorizableManagerImplTest extends AbstractAuthorizableManagerImplTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { - return DerbySetup.getClientPool(); + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return DerbySetup.getClientPool(configuration); } } diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerFinderImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerFinderImplTest.java new file mode 100644 index 00000000..917e8b8b --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerFinderImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.derby; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerFinderTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class ContentManagerFinderImplTest extends AbstractContentManagerFinderTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return DerbySetup.getClientPool(configuration); + } + +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerImplTest.java similarity index 72% rename from src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerManagerImplTest.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerImplTest.java index 3b0c2330..4b561f0a 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerManagerImplTest.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/ContentManagerImplTest.java @@ -17,14 +17,15 @@ */ package org.sakaiproject.nakamura.lite.jdbc.derby; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.content.AbstractContentManagerTest; -import 
org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; -public class ContentManagerManagerImplTest extends AbstractContentManagerTest { +public class ContentManagerImplTest extends AbstractContentManagerTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { - return DerbySetup.getClientPool(); + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return DerbySetup.getClientPool(configuration); } } diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/DerbySetup.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/DerbySetup.java new file mode 100644 index 00000000..1c72b42c --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/DerbySetup.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.jdbc.derby; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.DummyStorageCacheManager; +import org.sakaiproject.nakamura.lite.storage.jdbc.BaseJDBCStorageClientPool; + +public class DerbySetup { + + private static BaseJDBCStorageClientPool clientPool = null; + + private synchronized static BaseJDBCStorageClientPool createClientPool(Configuration configuration, String location) { + try { + BaseJDBCStorageClientPool connectionPool = new BaseJDBCStorageClientPool(); + connectionPool.storageManagerCache = new DummyStorageCacheManager(); + Builder configBuilder = ImmutableMap.builder(); + if ( location == null ) { + location = "jdbc:derby:memory:MyDB;create=true"; + } + configBuilder.put(BaseJDBCStorageClientPool.CONNECTION_URL, + location); + configBuilder.put(BaseJDBCStorageClientPool.JDBC_DRIVER, "org.apache.derby.jdbc.EmbeddedDriver"); + configBuilder.put("store-base-dir", "target/store"); + configBuilder.put(Configuration.class.getName(), configuration); + connectionPool.activate(configBuilder.build()); + return connectionPool; + } catch (ClassNotFoundException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + public static BaseJDBCStorageClientPool getClientPool(Configuration configuration) { + return getClientPool(configuration, null); + } + + public synchronized static BaseJDBCStorageClientPool getClientPool(Configuration configuration, String location) { + if ( clientPool == null ) { + clientPool = createClientPool(configuration, location); + } + return clientPool; + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/KeyValueRowsMain.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/KeyValueRowsMain.java new file mode 100644 index 00000000..d3a9ca38 --- /dev/null +++ 
b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/KeyValueRowsMain.java @@ -0,0 +1,298 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.derby; + +import java.io.File; +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.io.FileUtils; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +public class KeyValueRowsMain { + + private Connection connection; + private String[] dictionary; + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public KeyValueRowsMain() { + System.err.println(this.getClass().getName()); + } + + public void deleteDb(String file) { + FileUtils.deleteQuietly(new File(file)); + } + + public void open(String file) throws SQLException { + connection = DriverManager + 
.getConnection("jdbc:derby:" + file + "/db;create=true", "sa", ""); + } + + public void createTables(int columns) throws SQLException { + Statement s = connection.createStatement(); + StringBuilder sql = new StringBuilder(); + sql.append("CREATE TABLE cn_css_kv ("); + sql.append("id INTEGER NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1, INCREMENT BY 1),"); + sql.append("rid varchar(32) NOT NULL,"); + sql.append("cid varchar(64) NOT NULL,"); + sql.append("v varchar(740),"); + sql.append("primary key(id))"); + s.execute(sql.toString()); + s.execute("CREATE UNIQUE INDEX cn_css_kv_rc ON cn_css_kv (rid,cid)"); + s.execute("CREATE INDEX cn_css_kv_cv ON cn_css_kv (cid,v)"); + s.close(); + } + + public void populateDictionary(int size) throws NoSuchAlgorithmException, + UnsupportedEncodingException { + dictionary = new String[size]; + MessageDigest md = MessageDigest.getInstance("SHA1"); + for (int i = 0; i < size; i++) { + dictionary[i] = Base64.encodeBase64URLSafeString(md.digest(String.valueOf("Dict" + i) + .getBytes("UTF-8"))); + } + } + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public void loadTable(int columns, int records) throws SQLException, + UnsupportedEncodingException, NoSuchAlgorithmException { + StringBuilder sb = new StringBuilder(); + sb.append("insert into cn_css_kv (rid, cid, v) values ( ?, ?, ?)"); + PreparedStatement p = null; + Statement s = null; + ResultSet rs = null; + try { + p = connection.prepareStatement(sb.toString()); + MessageDigest sha1 = MessageDigest.getInstance("SHA1"); + SecureRandom sr = new SecureRandom(); + long cs = System.currentTimeMillis(); + s = connection.createStatement(); + rs = s.executeQuery( + "select count(*) from cn_css_kv"); + rs.next(); + int nrows = rs.getInt(1); + for (int i = 0 + nrows; i < records + nrows; i++) { + String rid = Base64.encodeBase64URLSafeString(sha1.digest(String + .valueOf("TEST" + i).getBytes("UTF-8"))); + for (int j = 0; j < columns; j++) { 
+ if (sr.nextBoolean()) { + p.clearParameters(); + p.setString(1, rid); + p.setString(2, "v" + j); + p.setString(3, dictionary[sr.nextInt(dictionary.length)]); + p.execute(); + } + } + + if (i % 500 == 0) { + connection.commit(); + long ct = System.currentTimeMillis(); + System.err.print(""+i+","+(ct-cs)+","); + testSelect(2, 0, columns, 5000, true); + cs = System.currentTimeMillis(); + + } + } + } finally { + if (rs != null) { + try { + rs.close(); + } catch (SQLException e ) { + + } + } + if (s != null) { + try { + s.close(); + } catch (SQLException e ) { + + } + } + if (p != null) { + try { + p.close(); + } catch (SQLException e ) { + + } + } + } + } + + private void close() throws SQLException { + connection.close(); + } + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public void testSelect(int ncols, int sorts, int columns, long timeToLive, boolean csv) throws SQLException { + StringBuilder sb = new StringBuilder(); + SecureRandom sr = new SecureRandom(); + Set used = new LinkedHashSet(); + while (used.size() < ncols) { + int c = sr.nextInt(columns); + if (!used.contains(c)) { + used.add(c); + } + } + Integer[] cnums = used.toArray(new Integer[ncols]); + sb.append("select distinct a.rid "); + if (sorts > 0) { + for (int i = 0; i < sorts; i++) { + sb.append(", s").append(i).append(".v "); + } + } + sb.append(" from cn_css_kv a "); + for (int i = 0; i < ncols; i++) { + sb.append(" , cn_css_kv a").append(i); + } + if (sorts > 0) { + for (int i = 0; i < sorts; i++) { + sb.append(" , cn_css_kv s").append(i); + } + } + sb.append(" where "); + for (int i = 0; i < ncols; i++) { + sb.append(" a").append(i).append(".cid = ? AND a").append(i).append(".v = ? AND a") + .append(i).append(".rid = a.rid AND "); + } + if (sorts > 0) { + for (int i = 0; i < sorts; i++) { + sb.append("s").append(i).append(".cid = ? 
AND a.rid = s").append(i) + .append(".rid AND "); + } + } + sb.append(" 1 = 1 "); + Integer[] snums = null; + if (sorts > 0) { + sb.append(" order by "); + used.clear(); + while (used.size() < sorts) { + int c = sr.nextInt(columns); + if (!used.contains(c)) { + used.add(c); + } + } + snums = used.toArray(new Integer[ncols]); + for (int i = 0; i < sorts - 1; i++) { + sb.append("s").append(i).append(".v ,"); + } + sb.append("s").append(sorts - 1).append(".v "); + } + if ( !csv) { + System.err.println(sb.toString()); + } + PreparedStatement p = null; + ResultSet rs = null; + long atstart = System.currentTimeMillis(); + int arows = 0; + int nq = 0; + try { + p = connection.prepareStatement(sb.toString()); + long endTestTime = atstart + timeToLive; + while (System.currentTimeMillis() < endTestTime) { + p.clearParameters(); + for (int i = 0; i < ncols; i++) { + p.setString(i * 2 + 1, "v" + cnums[i]); + p.setString(i * 2 + 2, dictionary[sr.nextInt(dictionary.length)]); + } + if (sorts > 0) { + for (int i = 0; i < sorts; i++) { + p.setString(i + 1 + ncols * 2, "s" + snums[i]); + } + } + rs = p.executeQuery(); + int rows = 0; + while (rs.next()) { + rows++; + } + arows += rows; + nq++; + rs.close(); + } + } finally { + if (rs != null) { + try { + rs.close(); + } catch (SQLException e) { + + } + } + if (p != null) { + try { + p.close(); + } catch (SQLException e) { + + } + } + } + double t = System.currentTimeMillis() - atstart; + double a = t / nq; + if ( csv ) { + System.err.println("" + (arows / nq) + "," + a ); + } else { + System.err.println("Found " + arows + " in " + t + "ms executed " + nq + " queries"); + System.err.println("Average " + (arows / nq) + " in " + a + "ms"); + } + + } + + public static void main(String[] argv) throws SQLException, NoSuchAlgorithmException, + UnsupportedEncodingException { + KeyValueRowsMain tmr = new KeyValueRowsMain(); + String db = "target/testkv"; + tmr.deleteDb(db); + boolean exists = new File(db).exists(); + tmr.open(db); + if 
(!exists) { + tmr.createTables(30); + } + tmr.populateDictionary(20); + tmr.loadTable(30, 10000); + tmr.testSelect(1, 0, 30, 5000); + tmr.testSelect(2, 0, 30, 5000); + tmr.testSelect(3, 0, 30, 5000); + tmr.testSelect(4, 0, 30, 5000); + tmr.testSelect(5, 0, 30, 5000); + tmr.testSelect(1, 1, 30, 5000); + tmr.testSelect(2, 1, 30, 5000); + tmr.testSelect(3, 1, 30, 5000); + tmr.testSelect(4, 1, 30, 5000); + tmr.testSelect(5, 1, 30, 5000); + tmr.testSelect(1, 2, 30, 5000); + tmr.testSelect(2, 2, 30, 5000); + tmr.testSelect(3, 2, 30, 5000); + tmr.testSelect(4, 2, 30, 5000); + tmr.testSelect(5, 2, 30, 5000); + tmr.close(); + } + private void testSelect(int i, int j, int k, int l) throws SQLException { + testSelect(i, j, k, l, false); + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/LockManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/LockManagerImplTest.java new file mode 100644 index 00000000..d6615966 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/LockManagerImplTest.java @@ -0,0 +1,14 @@ +package org.sakaiproject.nakamura.lite.jdbc.derby; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.lock.AbstractLockManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class LockManagerImplTest extends AbstractLockManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return DerbySetup.getClientPool(configuration); + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/MultiRowsMain.java b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/MultiRowsMain.java new file mode 100644 index 00000000..3594a7b6 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/MultiRowsMain.java @@ -0,0 +1,238 @@ +/** + * Licensed to the Sakai Foundation (SF) under 
one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.derby; + +import java.io.File; +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.io.FileUtils; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +public class MultiRowsMain { + + + private Connection connection; + private String[] dictionary; + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public MultiRowsMain() { + System.err.println(this.getClass().getName()); + } + + public void deleteDb(String file) { + FileUtils.deleteQuietly(new File(file)); + } + + public void open(String file) throws SQLException { + connection = DriverManager.getConnection("jdbc:derby:"+file+"/db;create=true", "sa", ""); + } + + public void createTables(int columns) throws SQLException { + Statement s 
= connection.createStatement(); + StringBuilder sql = new StringBuilder(); + sql.append("CREATE TABLE cn_css_index ("); + sql.append("rid varchar(32) NOT NULL,"); + for ( int i = 0; i < columns; i++ ) { + sql.append("v").append(i).append(" varchar(780),"); + } + sql.append("primary key(rid))"); + s.execute(sql.toString()); + for ( int i = 0; i < columns; i++) { + s.execute("CREATE INDEX cn_css_index_v"+i+" ON cn_css_index (v"+i+")"); + } + s.close(); + } + + + + public void populateDictionary(int size) throws NoSuchAlgorithmException, UnsupportedEncodingException { + dictionary = new String[size]; + MessageDigest md = MessageDigest.getInstance("SHA1"); + for ( int i = 0; i < size; i++) { + dictionary[i] = Base64.encodeBase64URLSafeString(md.digest(String.valueOf("Dict"+i).getBytes("UTF-8"))); + } + } + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public void loadTable(int columns, int records) throws SQLException, UnsupportedEncodingException, NoSuchAlgorithmException { + StringBuilder sb = new StringBuilder(); + sb.append("insert into cn_css_index (rid"); + for ( int i = 0; i < columns; i++ ) { + sb.append(",v").append(i); + } + sb.append(") values ( ?"); + for ( int i = 0; i < columns; i++ ) { + sb.append(",?"); + } + sb.append(")"); + Statement s = connection.createStatement(); + PreparedStatement p = connection.prepareStatement(sb.toString()); + MessageDigest sha1 = MessageDigest.getInstance("SHA1"); + SecureRandom sr = new SecureRandom(); + long cst = System.currentTimeMillis(); + long cs = System.currentTimeMillis(); + ResultSet rs = s.executeQuery("select count(*) from cn_css_index"); + rs.next(); + int nrows = rs.getInt(1); + for ( int i = 0+nrows; i < records+nrows; i++) { + String rid = Base64.encodeBase64URLSafeString(sha1.digest(String.valueOf("TEST"+i).getBytes("UTF-8"))); + p.clearParameters(); + p.setString(1, rid); + for ( int j = 2; j <= columns+1; j++) { + if ( sr.nextBoolean() ) { + p.setString(j, 
dictionary[sr.nextInt(dictionary.length)]); + } else { + p.setNull(j, Types.VARCHAR); + } + } + p.execute(); + + if ( i%500 == 0) { + connection.commit(); + long ct = System.currentTimeMillis(); + System.err.print(""+i+","+(ct-cs)+","); + testSelect(2, 0, columns, 5000, true); + cs = System.currentTimeMillis(); + + } + } + long ctt = System.currentTimeMillis(); + System.err.println("Commit "+records+" "+(ctt-cst)+" ms average time per row to insert "+((double)records/((double)ctt-(double)cst))); + p.close(); + s.close(); + } + private void close() throws SQLException { + connection.close(); + } + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public void testSelect(int ncols, int sorts, int columns, long timeToLive, boolean csv) throws SQLException { + StringBuilder sb = new StringBuilder(); + sb.append("select rid from cn_css_index where "); + SecureRandom sr = new SecureRandom(); + Set used = new LinkedHashSet(); + while(used.size() < ncols ) { + int c = sr.nextInt(columns); + if ( !used.contains(c) ) { + used.add(c); + } + } + Integer[] cnums = used.toArray(new Integer[ncols]); + for ( int i = 0; i < ncols-1; i++ ) { + + sb.append("v").append(cnums[i]).append(" = ? AND "); + } + sb.append("v").append(cnums[ncols-1]).append(" = ? 
"); + if ( sorts > 0 ) { + sb.append(" order by "); + used.clear(); + while(used.size() < sorts ) { + int c = sr.nextInt(columns); + if ( !used.contains(c) ) { + used.add(c); + } + } + cnums = used.toArray(new Integer[ncols]); + for ( int i = 0; i < sorts-1; i++ ) { + sb.append("v").append(cnums[i]).append(","); + } + sb.append("v").append(cnums[sorts-1]); + } + if ( !csv) { + System.err.println(sb.toString()); + } + PreparedStatement p = connection.prepareStatement(sb.toString()); + long atstart = System.currentTimeMillis(); + long endTestTime = atstart+timeToLive; + int nq = 0; + int arows = 0; + while(System.currentTimeMillis() < endTestTime) { + p.clearParameters(); + for ( int i = 1; i <= ncols; i++ ) { + p.setString(i, dictionary[sr.nextInt(dictionary.length)]); + } + ResultSet rs = p.executeQuery(); + int rows = 0; + while(rs.next()) { + rows++; + } + arows += rows; + nq++; + rs.close(); + } + double t = System.currentTimeMillis()-atstart; + double a = t/nq; + if ( csv ) { + System.err.println("" + (arows / nq) + "," + a ); + } else { + System.err.println("Found " + arows + " in " + t + "ms executed " + nq + " queries"); + System.err.println("Average " + (arows / nq) + " in " + a + "ms"); + } + p.close(); + + } + + public static void main(String[] argv) throws SQLException, NoSuchAlgorithmException, UnsupportedEncodingException { + MultiRowsMain tmr = new MultiRowsMain(); + String db = "target/testwide"; + tmr.deleteDb(db); + boolean exists = new File("target/testwide").exists(); + tmr.open(db); + if ( ! 
exists ) { + tmr.createTables(30); + } + tmr.populateDictionary(20); + tmr.loadTable(30, 10000); + tmr.testSelect(1, 0, 30, 5000); + tmr.testSelect(2, 0, 30, 5000); + tmr.testSelect(3, 0, 30, 5000); + tmr.testSelect(4, 0, 30, 5000); + tmr.testSelect(5, 0, 30, 5000); + tmr.testSelect(1, 1, 30, 5000); + tmr.testSelect(2, 1, 30, 5000); + tmr.testSelect(3, 1, 30, 5000); + tmr.testSelect(4, 1, 30, 5000); + tmr.testSelect(5, 1, 30, 5000); + tmr.testSelect(1, 2, 30, 5000); + tmr.testSelect(2, 2, 30, 5000); + tmr.testSelect(3, 2, 30, 5000); + tmr.testSelect(4, 2, 30, 5000); + tmr.testSelect(5, 2, 30, 5000); + tmr.close(); + } + + private void testSelect(int i, int j, int k, int l) throws SQLException { + testSelect(i, j, k, l, false); + } + + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/lock/AbstractLockManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/lock/AbstractLockManagerImplTest.java new file mode 100644 index 00000000..fd5fe010 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/lock/AbstractLockManagerImplTest.java @@ -0,0 +1,264 @@ +package org.sakaiproject.nakamura.lite.lock; + +import java.io.IOException; +import java.util.Map; + +import junit.framework.Assert; + +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.lock.AlreadyLockedException; +import org.sakaiproject.nakamura.api.lite.lock.LockState; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import 
org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; + +public abstract class AbstractLockManagerImplTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(AbstractLockManagerImplTest.class); + private ConfigurationImpl configuration; + private StorageClientPool clientPool; + private StorageClient client; + + @Before + public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, + ClassNotFoundException, IOException { + + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + properties.put("lock-column-family", "lk"); + configuration = new ConfigurationImpl(); + configuration.activate(properties); + clientPool = getClientPool(configuration); + client = clientPool.getClient(); + AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, + configuration); + authorizableActivator.setup(); + LOGGER.info("Setup Complete"); + } + + protected abstract StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException; + + @Test + public void testRootLock() throws StorageClientException, AccessDeniedException, + AlreadyLockedException { + User currentUser = new User(ImmutableMap.of(User.ID_FIELD, (Object) "ieb", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl = new LockManagerImpl(client, configuration, currentUser, + null); + String token = lockManagerImpl.lock("/", 10000, "Some Extra Information"); + + checkOwnerLockWithToken(lockManagerImpl, "/", "/", token); + checkOwnerLockWithToken(lockManagerImpl, "/", "/test/test234/sadsdf", token); + + checkOwnerLockWithNoToken(lockManagerImpl, "/", "/", "wrong-token"); + 
checkOwnerLockWithNoToken(lockManagerImpl, "/", "/test/test234/sadsdf", "wrong-token"); + + + lockManagerImpl.unlock("/", token); + + checkNotLocked(lockManagerImpl, "/", token); + checkNotLocked(lockManagerImpl, "/werttrew", token); + } + + @Test + public void testDeepLock() throws StorageClientException, AccessDeniedException, + AlreadyLockedException { + User currentUser = new User(ImmutableMap.of(User.ID_FIELD, (Object) "ieb", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl = new LockManagerImpl(client, configuration, currentUser, + null); + String token = lockManagerImpl.lock("/sub/folder", 10000, "Some Extra Information"); + + checkOwnerLockWithToken(lockManagerImpl, "/sub/folder", "/sub/folder", token); + checkOwnerLockWithToken(lockManagerImpl, "/sub/folder", "/sub/folder/test/test234/sadsdf", token); + + checkNotLocked(lockManagerImpl, "/sub", token); + checkNotLocked(lockManagerImpl, "/subtest/test123", token); + + lockManagerImpl.unlock("/sub/folder", token); + + checkNotLocked(lockManagerImpl, "/sub/folder", token); + checkNotLocked(lockManagerImpl, "/sub.folder/test/test123", token); + + } + + @Test + public void testDeepLockExpire() throws StorageClientException, AccessDeniedException, + AlreadyLockedException, InterruptedException { + User currentUser = new User(ImmutableMap.of(User.ID_FIELD, (Object) "ieb", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl = new LockManagerImpl(client, configuration, currentUser, + null); + String token = lockManagerImpl.lock("/sub/expire", 2, "Some Extra Information"); + + checkOwnerLockWithToken(lockManagerImpl, "/sub/expire", "/sub/expire", token); + checkOwnerLockWithToken(lockManagerImpl, "/sub/expire", "/sub/expire/test234/sadsf", token); + + checkNotLocked(lockManagerImpl, "/sub", token); + checkNotLocked(lockManagerImpl, "/subtest/test123", token); + + + LOGGER.info("Sleeping for 2100ms to allow lock to expire"); + Thread.sleep(2100L); + + checkNotLocked(lockManagerImpl, 
"/sub/expire", token); + checkNotLocked(lockManagerImpl, "/sub/expire/test23234", token); + + lockManagerImpl.unlock("/sub/expire", token); + + } + + private void checkOwnerLockWithToken(LockManagerImpl lockManagerImpl, String path, String testPath, String token) throws StorageClientException { + Assert.assertTrue(lockManagerImpl.isLocked(testPath)); + LockState lockState = lockManagerImpl.getLockState(testPath, token); + Assert.assertTrue(lockState.isOwner()); + Assert.assertEquals(path, lockState.getLockPath()); + Assert.assertTrue(lockState.hasMatchedToken()); + Assert.assertEquals(token, lockState.getToken()); + } + private void checkOwnerLockWithNoToken(LockManagerImpl lockManagerImpl, String path, String testPath, String token) throws StorageClientException { + Assert.assertTrue(lockManagerImpl.isLocked(testPath)); + LockState lockState = lockManagerImpl.getLockState(testPath, token); + Assert.assertTrue(lockState.isOwner()); + Assert.assertEquals(path, lockState.getLockPath()); + Assert.assertFalse(lockState.hasMatchedToken()); + } + + private void checkLocked(LockManagerImpl lockManagerImpl, String path, String testPath, String token) throws StorageClientException { + Assert.assertTrue(lockManagerImpl.isLocked(testPath)); + LockState lockState = lockManagerImpl.getLockState(testPath, token); + Assert.assertFalse(lockState.isOwner()); + Assert.assertEquals(path, lockState.getLockPath()); + Assert.assertFalse(lockState.hasMatchedToken()); + } + + private void checkNotLocked(LockManagerImpl lockManagerImpl,String testPath, String token) throws StorageClientException { + Assert.assertFalse(lockManagerImpl.isLocked(testPath)); + LockState lockState = lockManagerImpl.getLockState(testPath, token); + Assert.assertFalse(lockState.isOwner()); + Assert.assertNull(lockState.getLockPath()); + Assert.assertFalse(lockState.hasMatchedToken()); + } + + @Test + public void testOtherUserLock() throws StorageClientException, AccessDeniedException, + AlreadyLockedException, 
InterruptedException { + User currentUser = new User(ImmutableMap.of(User.ID_FIELD, (Object) "ieb", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl = new LockManagerImpl(client, configuration, currentUser, + null); + String token = lockManagerImpl.lock("/sub/ieb", 2, "Some Extra Information"); + + checkOwnerLockWithToken(lockManagerImpl, "/sub/ieb", "/sub/ieb", token); + checkOwnerLockWithToken(lockManagerImpl, "/sub/ieb", "/sub/ieb/sdfsd/sdf/sdfsdf", token); + + + User user2 = new User(ImmutableMap.of(User.ID_FIELD, (Object) "scl", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl2 = new LockManagerImpl(client, configuration, user2, + null); + + checkLocked(lockManagerImpl2, "/sub/ieb", "/sub/ieb", token); + checkLocked(lockManagerImpl2, "/sub/ieb", "/sub/ieb/sdfs/sdf/sdf", token); + + + lockManagerImpl.unlock("/sub/ieb", token); + + + } + + + @Test + public void testOtherUserReLock() throws StorageClientException, AccessDeniedException, + AlreadyLockedException, InterruptedException { + User currentUser = new User(ImmutableMap.of(User.ID_FIELD, (Object) "ieb", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl = new LockManagerImpl(client, configuration, currentUser, + null); + String token = lockManagerImpl.lock("/sub/iebrelock", 20, "Some Extra Information"); + + checkOwnerLockWithToken(lockManagerImpl, "/sub/iebrelock", "/sub/iebrelock", token); + checkOwnerLockWithToken(lockManagerImpl, "/sub/iebrelock", "/sub/iebrelock/sdfsd/sdf/sdfsdf", token); + + User user2 = new User(ImmutableMap.of(User.ID_FIELD, (Object) "scl", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl2 = new LockManagerImpl(client, configuration, user2, + null); + try { + String token2 = lockManagerImpl2.lock("/sub/iebrelock", 30, "Some More Information"); + Assert.fail(token2); + } catch ( AlreadyLockedException e) { + LOGGER.debug("Ok"); + } + + lockManagerImpl.unlock("/sub/iebrelock", token); + + + + } + + @Test + public 
void testReLockExpire() throws StorageClientException, AccessDeniedException, + AlreadyLockedException, InterruptedException { + User currentUser = new User(ImmutableMap.of(User.ID_FIELD, (Object) "ieb", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl = new LockManagerImpl(client, configuration, currentUser, + null); + String token = lockManagerImpl.lock("/sub/iebrelockexpire", 2, "Some Extra Information"); + + checkOwnerLockWithToken(lockManagerImpl, "/sub/iebrelockexpire", "/sub/iebrelockexpire", token); + checkOwnerLockWithToken(lockManagerImpl, "/sub/iebrelockexpire", "/sub/iebrelockexpire/sdfsd/sdf/sdfsdf", token); + + + + + // the lock is still held here; it only expires after the sleep below, at which point user2 can replace it. + User user2 = new User(ImmutableMap.of(User.ID_FIELD, (Object) "scl", + User.PASSWORD_FIELD, "test")); + LockManagerImpl lockManagerImpl2 = new LockManagerImpl(client, configuration, user2, + null); + + checkLocked(lockManagerImpl2, "/sub/iebrelockexpire", "/sub/iebrelockexpire", token); + checkLocked(lockManagerImpl2, "/sub/iebrelockexpire", "/sub/iebrelockexpire/sdf/sdf/sdf", token); + + LOGGER.info("Sleeping for 2100ms to allow lock to expire"); + Thread.sleep(2100L); + + checkNotLocked(lockManagerImpl, "/sub/iebrelockexpire", token); + checkNotLocked(lockManagerImpl, "/sub/iebrelockexpire/sdf/sdf/sdf", token); + + checkNotLocked(lockManagerImpl2, "/sub/iebrelockexpire", token); + checkNotLocked(lockManagerImpl2, "/sub/iebrelockexpire/sdf/sdf/sdf", token); + + String token2 = lockManagerImpl2.lock("/sub/iebrelockexpire", 30, "Some More Information"); + + checkOwnerLockWithToken(lockManagerImpl2, "/sub/iebrelockexpire", "/sub/iebrelockexpire", token2); + checkOwnerLockWithToken(lockManagerImpl2, "/sub/iebrelockexpire", "/sub/iebrelockexpire/sdfsd/sdf/sdfsdf", token2); + + checkLocked(lockManagerImpl, "/sub/iebrelockexpire", "/sub/iebrelockexpire", token); + checkLocked(lockManagerImpl, "/sub/iebrelockexpire", "/sub/iebrelockexpire/sdf/sdf/sdf", token); + + + 
lockManagerImpl2.unlock("/sub/iebrelockexpire", token2); + + + } + + +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/memory/AccessControlManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/AccessControlManagerImplTest.java similarity index 78% rename from src/test/java/org/sakaiproject/nakamura/lite/memory/AccessControlManagerImplTest.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/memory/AccessControlManagerImplTest.java index f0c27719..11674618 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/memory/AccessControlManagerImplTest.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/AccessControlManagerImplTest.java @@ -19,18 +19,20 @@ import com.google.common.collect.ImmutableMap; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.accesscontrol.AbstractAccessControlManagerImplTest; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; public class AccessControlManagerImplTest extends AbstractAccessControlManagerImplTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { MemoryStorageClientPool cp = new MemoryStorageClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/memory/AuthorizableManagerImplTest.java 
b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/AuthorizableManagerImplTest.java similarity index 78% rename from src/test/java/org/sakaiproject/nakamura/lite/memory/AuthorizableManagerImplTest.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/memory/AuthorizableManagerImplTest.java index 2b22c825..61447bf5 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/memory/AuthorizableManagerImplTest.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/AuthorizableManagerImplTest.java @@ -19,18 +19,20 @@ import com.google.common.collect.ImmutableMap; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.authorizable.AbstractAuthorizableManagerImplTest; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; public class AuthorizableManagerImplTest extends AbstractAuthorizableManagerImplTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { MemoryStorageClientPool cp = new MemoryStorageClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerFinderImplMan.java b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerFinderImplMan.java new file mode 100644 index 00000000..ebe31e8b --- /dev/null +++ 
b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerFinderImplMan.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.memory; + +import com.google.common.collect.ImmutableMap; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerFinderTest; +import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +public class ContentManagerFinderImplMan extends AbstractContentManagerFinderTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + MemoryStorageClientPool cp = new MemoryStorageClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); + return cp; + } +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerImplTest.java 
b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerImplTest.java similarity index 78% rename from src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerImplTest.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerImplTest.java index df87bdd5..db5374d7 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerImplTest.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/ContentManagerImplTest.java @@ -19,19 +19,20 @@ import com.google.common.collect.ImmutableMap; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.content.AbstractContentManagerTest; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; public class ContentManagerImplTest extends AbstractContentManagerTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { MemoryStorageClientPool cp = new MemoryStorageClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } - } diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/memory/LockManagerImplTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/LockManagerImplTest.java new file mode 100644 index 00000000..6714d535 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/memory/LockManagerImplTest.java @@ -0,0 +1,21 @@ +package 
org.sakaiproject.nakamura.lite.memory; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.lock.AbstractLockManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +import com.google.common.collect.ImmutableMap; + +public class LockManagerImplTest extends AbstractLockManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + MemoryStorageClientPool cp = new MemoryStorageClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); + return cp; + } +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractScalingClient.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractScalingClient.java similarity index 72% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractScalingClient.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractScalingClient.java index 745a3043..eff8e2d0 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractScalingClient.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractScalingClient.java @@ -17,17 +17,13 @@ */ package org.sakaiproject.nakamura.lite.soak; -import com.google.common.collect.Maps; - import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.lite.ConfigurationImpl; import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; -import 
org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; - -import java.util.Map; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; /** * Base class for multithreads soak tests, this class is the thing that is @@ -40,21 +36,16 @@ public abstract class AbstractScalingClient implements Runnable { protected StorageClientPool clientPool; protected StorageClient client; - protected ConfigurationImpl configuration; + protected Configuration configuration; - public AbstractScalingClient(StorageClientPool clientPool) throws ClientPoolException, + public AbstractScalingClient(StorageClientPool clientPool, Configuration configuration) throws ClientPoolException, StorageClientException, AccessDeniedException { this.clientPool = clientPool; + this.configuration = configuration; } public void setup() throws ClientPoolException, StorageClientException, AccessDeniedException { client = clientPool.getClient(); - configuration = new ConfigurationImpl(); - Map properties = Maps.newHashMap(); - properties.put("keyspace", "n"); - properties.put("acl-column-family", "ac"); - properties.put("authorizable-column-family", "au"); - configuration.activate(properties); AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, configuration); authorizableActivator.setup(); diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractSoakController.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractSoakController.java similarity index 100% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractSoakController.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractSoakController.java diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsClient.java 
b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsClient.java similarity index 78% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsClient.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsClient.java index 67b5a1e5..e51d2f7a 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsClient.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsClient.java @@ -21,17 +21,20 @@ import org.sakaiproject.nakamura.api.lite.CacheHolder; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; import org.sakaiproject.nakamura.api.lite.authorizable.User; import org.sakaiproject.nakamura.lite.LoggingStorageListener; import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.PrincipalValidatorResolverImpl; import org.sakaiproject.nakamura.lite.authorizable.AuthorizableManagerImpl; import org.sakaiproject.nakamura.lite.soak.AbstractScalingClient; -import org.sakaiproject.nakamura.lite.storage.ConcurrentLRUMap; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.ConcurrentLRUMap; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; import java.util.Map; @@ -39,10 +42,11 @@ public class CreateUsersAndGroupsClient extends AbstractScalingClient { private int nusers; private Map 
sharedCache = new ConcurrentLRUMap(1000); + private PrincipalValidatorResolver principalValidatorResolver = new PrincipalValidatorResolverImpl(); - public CreateUsersAndGroupsClient(int totalUsers, StorageClientPool clientPool) + public CreateUsersAndGroupsClient(int totalUsers, StorageClientPool clientPool, Configuration configuration) throws ClientPoolException, StorageClientException, AccessDeniedException { - super(clientPool); + super(clientPool, configuration); nusers = totalUsers; } @@ -51,19 +55,19 @@ public void run() { super.setup(); String tname = String.valueOf(Thread.currentThread().getId()) + String.valueOf(System.currentTimeMillis()); - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl( - client, currentUser, configuration, sharedCache, new LoggingStorageListener()); + client, currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); + null, client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); for (int i = 0; i < nusers; i++) { String userId = tname + "_" + i; authorizableManager.createUser(userId, userId, "test", ImmutableMap.of(userId, - (Object) "testvalue", "principals", "administrators;testers", + (Object) "testvalue", Authorizable.PRINCIPALS_FIELD, "administrators;testers", Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.GROUP_VALUE)); } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsWithMembersClient.java 
b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsWithMembersClient.java similarity index 84% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsWithMembersClient.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsWithMembersClient.java index 65548c0f..d4a97c59 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsWithMembersClient.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/authorizable/CreateUsersAndGroupsWithMembersClient.java @@ -22,16 +22,19 @@ import org.sakaiproject.nakamura.api.lite.CacheHolder; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; import org.sakaiproject.nakamura.api.lite.authorizable.Group; import org.sakaiproject.nakamura.api.lite.authorizable.User; import org.sakaiproject.nakamura.lite.LoggingStorageListener; import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.PrincipalValidatorResolverImpl; import org.sakaiproject.nakamura.lite.authorizable.AuthorizableManagerImpl; import org.sakaiproject.nakamura.lite.soak.AbstractScalingClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; import java.util.List; import java.util.Map; @@ -42,11 +45,12 @@ public class CreateUsersAndGroupsWithMembersClient extends AbstractScalingClient private int nusers; private int ngroups; private Map sharedCache = new ConcurrentHashMap(1000); 
+ private PrincipalValidatorResolver principalValidatorResolver = new PrincipalValidatorResolverImpl(); public CreateUsersAndGroupsWithMembersClient(int totalUsers, int totalGroups, - StorageClientPool connectionPool) throws ClientPoolException, StorageClientException, + StorageClientPool connectionPool, Configuration configuration) throws ClientPoolException, StorageClientException, AccessDeniedException { - super(connectionPool); + super(connectionPool, configuration); nusers = totalUsers; ngroups = totalGroups; } @@ -56,14 +60,14 @@ public void run() { super.setup(); String tname = String.valueOf(Thread.currentThread().getId()) + String.valueOf(System.currentTimeMillis()); - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl( - client, currentUser, configuration, sharedCache, new LoggingStorageListener()); + client, currentUser, configuration, sharedCache, new LoggingStorageListener(), principalValidatorResolver); AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); + null, client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); List userNames = Lists.newArrayList(); List groupNames = Lists.newArrayList(); diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/soak/content/ContentCreateClient.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/content/ContentCreateClient.java new file mode 100644 index 00000000..6079220d --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/content/ContentCreateClient.java @@ -0,0 +1,129 @@ +package org.sakaiproject.nakamura.lite.soak.content; + +import 
java.util.Iterator; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.PrincipalValidatorResolver; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.lite.LoggingStorageListener; +import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; +import org.sakaiproject.nakamura.lite.accesscontrol.PrincipalValidatorResolverImpl; +import org.sakaiproject.nakamura.lite.authorizable.AuthorizableManagerImpl; +import org.sakaiproject.nakamura.lite.content.ContentManagerImpl; +import org.sakaiproject.nakamura.lite.soak.AbstractScalingClient; +import org.sakaiproject.nakamura.lite.storage.spi.ConcurrentLRUMap; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ContentCreateClient extends AbstractScalingClient { + + private static final Logger LOGGER = LoggerFactory.getLogger(ContentCreateClient.class); + private Map sharedCache = new ConcurrentLRUMap(1000); + private PrincipalValidatorResolver principalValidatorResolver = new PrincipalValidatorResolverImpl(); + private int totalContentItems; + private Map propertyMap; + + public ContentCreateClient(int totalContentItems, StorageClientPool clientPool, + Configuration configuration, Map propertyMap) + throws ClientPoolException, StorageClientException, AccessDeniedException { + super(clientPool, configuration); + this.propertyMap = propertyMap; + this.totalContentItems = totalContentItems; + } + 
+ @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="DLS_DEAD_LOCAL_STORE",justification="Its a test, so not used.") + public void run() { + try { + super.setup(); + AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration, null); + User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); + + AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl( + client, currentUser, configuration, sharedCache, new LoggingStorageListener(), + principalValidatorResolver); + + @SuppressWarnings("unused") + AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, + null, client, configuration, accessControlManagerImpl, sharedCache, + new LoggingStorageListener()); + + ContentManagerImpl contentManagerImpl = new ContentManagerImpl(client, + accessControlManagerImpl, configuration, sharedCache, + new LoggingStorageListener(true)); + + String basePath = String.valueOf(System.currentTimeMillis()); + long s = System.currentTimeMillis(); + long s100 = s; + Content baseContent = new Content(basePath, propertyMap); + contentManagerImpl.update(baseContent); + for (int i = 0; i < totalContentItems; i++) { + Content content = new Content(basePath + "/" + i, propertyMap); + contentManagerImpl.update(content); + if (i > 0 && i % 1000 == 0) { + long tn = System.currentTimeMillis(); + long t = tn - s; + long t100 = tn - s100; + s100 = tn; + LOGGER.info("Created {} items in {}, average {} ms last 1000 {} ms ", new Object[] { i, t, + ((double) t / (double) i), ((double) t100/(double)1000) }); + } + } + long t = System.currentTimeMillis() - s; + LOGGER.info("Created {} items in {}, each item {} ms ", new Object[] { + totalContentItems, t, ((double) t / (double) totalContentItems) }); + for (int i = 0; i < totalContentItems; i++) { + Content content = contentManagerImpl.get(basePath + "/" + i); + content.setProperty("sling:resourceType", "somethingelse"); + contentManagerImpl.update(content); + if (i > 
0 && i % 1000 == 0) { + long tn = System.currentTimeMillis(); + t = tn - s; + long t100 = tn - s100; + s100 = tn; + LOGGER.info("Updated {} items in {}, average {} ms last 1000 {} ms ", new Object[] { i, t, + ((double) t / (double) i), ((double) t100/(double)1000) }); + } + } + t = System.currentTimeMillis() - s; + LOGGER.info("Updated {} items in {}, each item {} ms ", new Object[] { + totalContentItems, t, ((double) t / (double) totalContentItems) }); + + Content parent = contentManagerImpl.get(basePath); + s = System.currentTimeMillis(); + Iterable i = parent.listChildPaths(); + t = System.currentTimeMillis(); + LOGGER.info("Getting Child iterable took {} ms ", (t-s)); + s = t; + Iterator iterator = i.iterator(); + t = System.currentTimeMillis(); + LOGGER.info("Getting Child iterator took {} ms ", (t-s)); + s = t; + int n = 0; + while(iterator.hasNext()) { + @SuppressWarnings("unused") + String p = iterator.next(); + if ( n == 0 ) { + t = System.currentTimeMillis(); + LOGGER.info("Getting First Child took {} ms ", (t-s)); + s = t; + } + n++; + } + t = System.currentTimeMillis(); + LOGGER.info("Getting All Children took {} ms ", (t-s)); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage(), e); + } + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/ContentCreateSoak.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/ContentCreateSoak.java new file mode 100644 index 00000000..692d0455 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/ContentCreateSoak.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.soak.derby; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.jdbc.derby.DerbySetup; +import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; +import org.sakaiproject.nakamura.lite.soak.content.ContentCreateClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.Maps; + +public class ContentCreateSoak extends AbstractSoakController { + + private static final Logger LOGGER = LoggerFactory.getLogger(ContentCreateSoak.class); + private int totalContent; + private StorageClientPool connectionPool; + private Configuration configuration; + private Map contentMap; + + public ContentCreateSoak(int totalContent, + StorageClientPool connectionPool, Configuration configuration, Map cm) { + super(totalContent); + 
this.configuration = configuration; + this.connectionPool = connectionPool; + this.totalContent = totalContent; + this.contentMap = cm; + } + + protected Runnable getRunnable(int nthreads) throws ClientPoolException, + StorageClientException, AccessDeniedException { + int contentPerThread = totalContent / nthreads; + return new ContentCreateClient(contentPerThread, + connectionPool, configuration, contentMap); + } + + public static void main(String[] argv) throws ClientPoolException, StorageClientException, + AccessDeniedException, ClassNotFoundException, IOException { + + int totalContent = 100000; + int nthreads = 1; + + if (argv.length > 0) { + nthreads = StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads); + } + if (argv.length > 1) { + totalContent = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalContent); + } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map cm = Maps.newHashMap(); + cm.put("sling:resourceType","test/resourcetype"); + cm.put("sakai:pooled-content-manager",new String[]{"a","b"}); + cm.put("sakai:type","sdfsdaggdsfgsdgsd"); + cm.put("sakai:marker","marker-marker-marker"); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); + + ContentCreateSoak contentCreateSoak = new ContentCreateSoak( + totalContent, DerbySetup.getClientPool(configuration,"jdbc:derby:target/soak/db;create=true"), configuration, cm); + contentCreateSoak.launchSoak(nthreads); + contentCreateSoak.shutdown(); + + + } + + private void shutdown() { + Connection connection = null; + try { + connection = DriverManager.getConnection("jdbc:derby:target/soak/db;shutdown=true"); + } catch (SQLException e) { + // yes really see + // http://db.apache.org/derby/manuals/develop/develop15.html#HDRSII-DEVELOP-40464 + LOGGER.info("Sparse Map 
Content Derby Embedded instance shutdown successfully {}", + e.getMessage()); + } finally { + if (connection != null) { + try { + connection.close(); + } catch (SQLException e) { + LOGGER.debug( + "Very Odd, the getConnection should not have opened a connection (see DerbyDocs)," + + " but it did, and when we tried to close it we got " + + e.getMessage(), e); + } + } + } + } + + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/CreateUsersAndGroupsSoak.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/CreateUsersAndGroupsSoak.java new file mode 100644 index 00000000..b3c7f18f --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/CreateUsersAndGroupsSoak.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.soak.derby; + +import java.io.IOException; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.jdbc.derby.DerbySetup; +import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; +import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +import com.google.common.collect.Maps; + +public class CreateUsersAndGroupsSoak extends AbstractSoakController { + + private int totalUsers; + private StorageClientPool connectionPool; + private Configuration configuration; + + public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool, Configuration configuration) { + super(totalUsers); + this.configuration = configuration; + this.connectionPool = connectionPool; + this.totalUsers = totalUsers; + } + + protected Runnable getRunnable(int nthreads) throws ClientPoolException, + StorageClientException, AccessDeniedException { + int usersPerThread = totalUsers / nthreads; + return new CreateUsersAndGroupsClient(usersPerThread, connectionPool, configuration); + } + + public static void main(String[] argv) throws ClientPoolException, StorageClientException, + AccessDeniedException, ClassNotFoundException, IOException { + + int totalUsers = 1000; + int nthreads = 10; + + if (argv.length > 0) { + nthreads = StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads); + } + if (argv.length > 1) { + totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers); + } + ConfigurationImpl configuration = 
new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); + + CreateUsersAndGroupsSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsSoak( + totalUsers, DerbySetup.getClientPool(configuration,"jdbc:derby:target/soak/db;create=true"), configuration); + createUsersAndGroupsSoak.launchSoak(nthreads); + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/CreateUsersAndGroupsWithMembersSoak.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/CreateUsersAndGroupsWithMembersSoak.java new file mode 100644 index 00000000..e79e8543 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/CreateUsersAndGroupsWithMembersSoak.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.soak.derby; + +import java.io.IOException; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.jdbc.derby.DerbySetup; +import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; +import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsWithMembersClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +import com.google.common.collect.Maps; + +public class CreateUsersAndGroupsWithMembersSoak extends AbstractSoakController { + + private int totalUsers; + private StorageClientPool connectionPool; + private int totalGroups; + private Configuration configuration; + + public CreateUsersAndGroupsWithMembersSoak(int totalUsers, int totalGroups, + StorageClientPool connectionPool, Configuration configuration) { + super(totalUsers); + this.configuration = configuration; + this.connectionPool = connectionPool; + this.totalUsers = totalUsers; + this.totalGroups = totalGroups; + } + + protected Runnable getRunnable(int nthreads) throws ClientPoolException, + StorageClientException, AccessDeniedException { + int usersPerThread = totalUsers / nthreads; + int groupsPerThread = totalGroups / nthreads; + return new CreateUsersAndGroupsWithMembersClient(usersPerThread, groupsPerThread, + connectionPool, configuration); + } + + public static void main(String[] argv) throws ClientPoolException, StorageClientException, + AccessDeniedException, ClassNotFoundException, IOException { + + int totalUsers = 1000; + int totalGroups = 1000; + int nthreads = 10; + + if (argv.length > 0) { + nthreads = 
StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads); + } + if (argv.length > 1) { + totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers); + } + if (argv.length > 2) { + totalGroups = StorageClientUtils.getSetting(Integer.valueOf(argv[2]), totalGroups); + } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); + + CreateUsersAndGroupsWithMembersSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsWithMembersSoak( + totalUsers, totalGroups, DerbySetup.getClientPool(configuration,"jdbc:derby:target/soak/db;create=true"), configuration); + createUsersAndGroupsSoak.launchSoak(nthreads); + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/SoakAll.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/SoakAll.java new file mode 100644 index 00000000..e9f4869d --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/derby/SoakAll.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.soak.derby; + +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; + +import java.io.IOException; + +public class SoakAll { + + public static void main(String[] argv) throws ClientPoolException, StorageClientException, + AccessDeniedException, ClassNotFoundException, IOException { + CreateUsersAndGroupsSoak.main(argv); + CreateUsersAndGroupsWithMembersSoak.main(argv); + } +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsSoak.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsSoak.java similarity index 69% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsSoak.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsSoak.java index 0c79868c..69ec829b 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsSoak.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsSoak.java @@ -18,24 +18,32 @@ package org.sakaiproject.nakamura.lite.soak.memory; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; import 
org.sakaiproject.nakamura.lite.soak.AbstractSoakController; import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +import java.io.IOException; +import java.util.Map; public class CreateUsersAndGroupsSoak extends AbstractSoakController { private int totalUsers; private StorageClientPool connectionPool; + private Configuration configuration; - public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool) { + public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool, Configuration configuration) { super(totalUsers); + this.configuration = configuration; this.connectionPool = connectionPool; this.totalUsers = totalUsers; } @@ -43,11 +51,11 @@ public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool protected Runnable getRunnable(int nthreads) throws ClientPoolException, StorageClientException, AccessDeniedException { int usersPerThread = totalUsers / nthreads; - return new CreateUsersAndGroupsClient(usersPerThread, connectionPool); + return new CreateUsersAndGroupsClient(usersPerThread, connectionPool, configuration); } public static void main(String[] argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { int totalUsers = 1000; int nthreads = 10; @@ -58,16 +66,24 @@ public static void main(String[] argv) throws ClientPoolException, StorageClient if (argv.length > 1) { totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers); } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + 
properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); CreateUsersAndGroupsSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsSoak( - totalUsers, getConnectionPool()); + totalUsers, getConnectionPool(configuration), configuration); createUsersAndGroupsSoak.launchSoak(nthreads); } - protected static StorageClientPool getConnectionPool() throws ClassNotFoundException { + protected static StorageClientPool getConnectionPool(Configuration configuration) throws ClassNotFoundException { MemoryStorageClientPool cp = new MemoryStorageClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsWithMembersSoak.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsWithMembersSoak.java similarity index 71% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsWithMembersSoak.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsWithMembersSoak.java index b765e55a..e00a4947 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsWithMembersSoak.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/CreateUsersAndGroupsWithMembersSoak.java @@ -18,26 +18,34 @@ package org.sakaiproject.nakamura.lite.soak.memory; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import 
org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsWithMembersClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +import java.io.IOException; +import java.util.Map; public class CreateUsersAndGroupsWithMembersSoak extends AbstractSoakController { private int totalUsers; private StorageClientPool connectionPool; private int totalGroups; + private Configuration configuration; public CreateUsersAndGroupsWithMembersSoak(int totalUsers, int totalGroups, - StorageClientPool connectionPool) { + StorageClientPool connectionPool, Configuration configuration) { super(totalUsers + (totalGroups * 5)); + this.configuration = configuration; this.connectionPool = connectionPool; this.totalUsers = totalUsers; this.totalGroups = totalGroups; @@ -48,11 +56,11 @@ protected Runnable getRunnable(int nthreads) throws ClientPoolException, int usersPerThread = totalUsers / nthreads; int groupsPerThread = totalGroups / nthreads; return new CreateUsersAndGroupsWithMembersClient(usersPerThread, groupsPerThread, - connectionPool); + connectionPool, configuration); } public static void main(String[] argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { int totalUsers = 1000; int totalGroups = 100; @@ -67,16 +75,24 @@ public static void main(String[] argv) throws ClientPoolException, 
StorageClient if (argv.length > 2) { totalGroups = StorageClientUtils.getSetting(Integer.valueOf(argv[2]), totalUsers); } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); CreateUsersAndGroupsWithMembersSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsWithMembersSoak( - totalUsers, totalGroups, getConnectionPool()); + totalUsers, totalGroups, getConnectionPool(configuration), configuration); createUsersAndGroupsSoak.launchSoak(nthreads); } - protected static StorageClientPool getConnectionPool() throws ClassNotFoundException { + protected static StorageClientPool getConnectionPool(Configuration configuration) throws ClassNotFoundException { MemoryStorageClientPool cp = new MemoryStorageClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/SoakAll.java b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/SoakAll.java similarity index 92% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/memory/SoakAll.java rename to core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/SoakAll.java index 8ff9073b..2e803962 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/SoakAll.java +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/soak/memory/SoakAll.java @@ -21,10 +21,12 @@ import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import java.io.IOException; + public class SoakAll { public static void main(String[] 
argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { CreateUsersAndGroupsSoak.main(argv); CreateUsersAndGroupsWithMembersSoak.main(argv); } diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/DependencySequenceTest.java b/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/DependencySequenceTest.java new file mode 100644 index 00000000..3da12ed3 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/DependencySequenceTest.java @@ -0,0 +1,157 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc.migrate; + +import java.util.List; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.PropertyMigrator; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; + +public class DependencySequenceTest { + + private static final Map EMPTY_MAP = ImmutableMap.of(); + private static final String[] EMPTY_STRING = new String[0]; + private static final Map EMPTY_STRING_MAP = ImmutableMap.of(); + + @Test + public void testEmptySequence() { + PropertyMigrator[] propertyMigrators = new PropertyMigrator[0]; + DependencySequence dependencySequence = new DependencySequence(propertyMigrators, EMPTY_MAP); + Assert.assertFalse(dependencySequence.hasUnresolved()); + Assert.assertEquals(0, dependencySequence.getUnresolved().size()); + Assert.assertEquals(0, getSize(dependencySequence)); + Assert.assertEquals(0, getSize(dependencySequence.getAlreadyRun())); + } + + @Test + public void testResolvableSequence() { + PropertyMigrator[] propertyMigrators = new PropertyMigrator[] { + new TPropertyMigrator("test1", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test2", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test3", EMPTY_STRING, EMPTY_STRING_MAP), + new 
TPropertyMigrator("test4", EMPTY_STRING, EMPTY_STRING_MAP) }; + DependencySequence dependencySequence = new DependencySequence(propertyMigrators, EMPTY_MAP); + Assert.assertFalse(dependencySequence.hasUnresolved()); + Assert.assertEquals(0, dependencySequence.getUnresolved().size()); + Assert.assertArrayEquals(propertyMigrators, getArray(dependencySequence)); + Assert.assertEquals(0, getSize(dependencySequence.getAlreadyRun())); + } + + @Test + public void testUnResolvableSequence() { + PropertyMigrator[] propertyMigrators = new PropertyMigrator[] { + new TPropertyMigrator("test1", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test2", new String[] { "test22" }, EMPTY_STRING_MAP), + new TPropertyMigrator("test3", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test4", EMPTY_STRING, EMPTY_STRING_MAP) }; + DependencySequence dependencySequence = new DependencySequence(propertyMigrators, EMPTY_MAP); + Assert.assertTrue(dependencySequence.hasUnresolved()); + Assert.assertArrayEquals(new PropertyMigrator[] { propertyMigrators[1] }, + getArray(dependencySequence.getUnresolved())); + Assert.assertArrayEquals(new PropertyMigrator[0], getArray(dependencySequence)); + Assert.assertEquals(0, getSize(dependencySequence.getAlreadyRun())); + } + + @Test + public void testResolvableAlreadyRunSequence() { + PropertyMigrator[] propertyMigrators = new PropertyMigrator[] { + new TPropertyMigrator("test1", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test2", new String[] { "test22" }, EMPTY_STRING_MAP), + new TPropertyMigrator("test3", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test4", EMPTY_STRING, EMPTY_STRING_MAP) }; + DependencySequence dependencySequence = new DependencySequence(propertyMigrators, + ImmutableMap.of("test22", (Object) "122312312;0")); + Assert.assertFalse(dependencySequence.hasUnresolved()); + Assert.assertArrayEquals(new PropertyMigrator[0], + getArray(dependencySequence.getUnresolved())); + 
Assert.assertArrayEquals(propertyMigrators, getArray(dependencySequence)); + Assert.assertArrayEquals(new String[] { "test22" }, getArray(dependencySequence + .getAlreadyRun().keySet())); + } + + @Test + public void testResolvableAlreadyRunUnspecifiedSequence() { + PropertyMigrator[] propertyMigrators = new PropertyMigrator[] { + new TPropertyMigrator("test1", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test2", new String[] { "test22" }, EMPTY_STRING_MAP), + new TPropertyMigrator("test3", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test4", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test22", EMPTY_STRING, EMPTY_STRING_MAP) }; + DependencySequence dependencySequence = new DependencySequence(propertyMigrators, + ImmutableMap.of("test22", (Object) "122312312;0")); + Assert.assertArrayEquals(new String[] { "test22" }, getArray(dependencySequence + .getAlreadyRun().keySet())); + Assert.assertArrayEquals(new PropertyMigrator[0], + getArray(dependencySequence.getUnresolved())); + Assert.assertArrayEquals(new PropertyMigrator[] { + propertyMigrators[0], propertyMigrators[2], propertyMigrators[3], propertyMigrators[4], propertyMigrators[1] + }, getArray(dependencySequence)); + } + + @Test + public void testResolvableAlreadyRunOnceSequence() { + PropertyMigrator[] propertyMigrators = new PropertyMigrator[] { + new TPropertyMigrator("test1", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test2", new String[] { "test22" }, EMPTY_STRING_MAP), + new TPropertyMigrator("test3", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test4", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test22", EMPTY_STRING, ImmutableMap.of( + PropertyMigrator.OPTION_RUNONCE, "false")) }; + DependencySequence dependencySequence = new DependencySequence(propertyMigrators, + ImmutableMap.of("test22", (Object) "122312312;0")); + Assert.assertFalse(dependencySequence.hasUnresolved()); + Assert.assertArrayEquals(new PropertyMigrator[0], 
+ getArray(dependencySequence.getUnresolved())); + Assert.assertArrayEquals(new PropertyMigrator[] { + propertyMigrators[0], propertyMigrators[2], propertyMigrators[3], propertyMigrators[4], propertyMigrators[1] + }, getArray(dependencySequence)); + Assert.assertArrayEquals(new String[] { "test22" }, getArray(dependencySequence + .getAlreadyRun().keySet())); + } + + @Test + public void testResolvableAlreadyRunOnlyOnceSequence() { + PropertyMigrator[] propertyMigrators = new PropertyMigrator[] { + new TPropertyMigrator("test1", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test2", new String[] { "test22" }, EMPTY_STRING_MAP), + new TPropertyMigrator("test3", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test4", EMPTY_STRING, EMPTY_STRING_MAP), + new TPropertyMigrator("test22", EMPTY_STRING, ImmutableMap.of( + PropertyMigrator.OPTION_RUNONCE, "true")) }; + DependencySequence dependencySequence = new DependencySequence(propertyMigrators, + ImmutableMap.of("test22", (Object) "122312312;0")); + Assert.assertFalse(dependencySequence.hasUnresolved()); + Assert.assertArrayEquals(new PropertyMigrator[0], + getArray(dependencySequence.getUnresolved())); + Assert.assertArrayEquals((new PropertyMigrator[] { propertyMigrators[0], + propertyMigrators[1], propertyMigrators[2], propertyMigrators[3] }), + getArray(dependencySequence)); + Assert.assertArrayEquals(new String[] { "test22" }, getArray(dependencySequence + .getAlreadyRun().keySet())); + } + + @SuppressWarnings("unchecked") + private T[] getArray(Iterable iterator) { + List l = Lists.newArrayList(); + for (T t : iterator) { + l.add(t); + } + return (T[]) l.toArray(); + } + + private int getSize(Iterable iterable) { + int i = 0; + for (@SuppressWarnings("unused") T t : iterable) { + i++; + } + return i; + } + + private int getSize(Map alreadyRun) { + return alreadyRun.size(); + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoLoggerTest.java 
b/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoLoggerTest.java new file mode 100644 index 00000000..8118befa --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/FileRedoLoggerTest.java @@ -0,0 +1,72 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc.migrate; + +import java.io.File; +import java.io.IOException; +import java.text.MessageFormat; + +import org.junit.Assert; +import org.junit.Test; +import org.sakaiproject.nakamura.api.lite.Feedback; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; + +public class FileRedoLoggerTest { + + protected static final Logger LOGGER = LoggerFactory.getLogger(FileRedoLoggerTest.class); + protected int files; + + @Test + public void testFileRedoLogger() throws IOException { + FileRedoLogger logger = new FileRedoLogger("target/redoloaTest", 1024, new Feedback() { + + private File lastFile; + + public void log(String format, Object... 
params) { + LOGGER.info(MessageFormat.format(format, params)); + } + + public void exception(Throwable e) { + LOGGER.warn(e.getMessage(),e); + } + + public void newLogFile(File currentFile) { + if ( lastFile != null ) { + LOGGER.info("Last File size {} ",lastFile.length()); + if ( lastFile.length() < 1024) { + Assert.fail("File was not big enough"); + } + } + LOGGER.info("New Log File {} ",currentFile.getAbsoluteFile()); + lastFile = currentFile; + files++; + } + + public void progress(boolean dryRun, long done, long toDo) { + LOGGER.info("DryRun:{} {}% remaining {} ", new Object[] { dryRun, + ((done * 100) / toDo), toDo - done }); + + } + }); + + for ( int i = 0; i < 1000; i++ ) { + logger.begin(); + logger.before("n", "cf", "key", ImmutableMap.of("before", (Object)"before")); + logger.after("n", "cf", "key", ImmutableMap.of("after", (Object)"after")); + logger.delete("n", "cf", "k2"); + if ( i % 10 == 0) { + logger.rollback(); + } else if ( i % 11 != 0 ) { + logger.commit(); + } + } + Assert.assertTrue(files > 0); + logger.close(); + + FileRedoReader reader = new FileRedoReader(logger.getLocation()); + reader.analyse(); + + + } +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/TPropertyMigrator.java b/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/TPropertyMigrator.java new file mode 100644 index 00000000..827e9315 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/storage/jdbc/migrate/TPropertyMigrator.java @@ -0,0 +1,40 @@ +package org.sakaiproject.nakamura.lite.storage.jdbc.migrate; + +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.PropertyMigrator; + +public class TPropertyMigrator implements PropertyMigrator { + + private String name; + private String[] dependencies; + private Map options; + + public TPropertyMigrator(String name, String[] dependencies, Map options) { + this.name = name; + this.dependencies = dependencies; + this.options = options; + } + + 
public boolean migrate(String rid, Map properties) { + return false; + } + + public String[] getDependencies() { + return dependencies; + } + + public String getName() { + return name; + } + + @Override + public String toString() { + return name; + } + + public Map getOptions() { + return options; + } + +} diff --git a/core/src/test/java/org/sakaiproject/nakamura/lite/storage/spi/types/TestTypes.java b/core/src/test/java/org/sakaiproject/nakamura/lite/storage/spi/types/TestTypes.java new file mode 100644 index 00000000..a7f6cdd6 --- /dev/null +++ b/core/src/test/java/org/sakaiproject/nakamura/lite/storage/spi/types/TestTypes.java @@ -0,0 +1,278 @@ +package org.sakaiproject.nakamura.lite.storage.spi.types; + +import com.google.common.collect.Maps; + + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.lite.storage.spi.types.LongString; +import org.sakaiproject.nakamura.lite.storage.spi.types.StringType; +import org.sakaiproject.nakamura.lite.storage.spi.types.Type; +import org.sakaiproject.nakamura.lite.storage.spi.types.Types; + +import java.io.IOException; +import java.io.InputStream; +import java.math.BigDecimal; +import java.util.Calendar; +import java.util.Map; +import java.util.TimeZone; + +public class TestTypes { + + + private static final String SHORTSTRING = "AShortString"; + private static final String LONGSTRING = "Longer than "+SHORTSTRING; + + @Before + public void before() { + LongString.setBase("target/longstringstore"); + } + + @Test + public void testTypes() { + Map> typeById = Types.getTypeByIdMap(); + Assert.assertNotNull(typeById); + Map, Type> typeByClass = Types.getTypeMap(); + Assert.assertNotNull(typeByClass); + } + + @Test + public void testWriteTypes() throws IOException { + Map map = Maps.newHashMap(); + map.put("A", 1); + map.put("B", Long.MAX_VALUE); + map.put("C", "String"); + map.put("D", new BigDecimal("12345.12E23")); + Calendar cal = 
Calendar.getInstance(TimeZone.getTimeZone("BST")); + cal.setTimeInMillis(System.currentTimeMillis()); + map.put("E", cal); + map.put("F", (double)0.1); + map.put("G", true); + map.put("H", false); + map.put("J", null); + + InputStream in = Types.storeMapToStream("testkey", map, "testcf"); + Map output = Maps.newHashMap(); + Types.loadFromStream("testkey", output, in, "testcf"); + + Integer a = (Integer) output.get("A"); + Assert.assertNotNull(a); + Assert.assertEquals(1, a.intValue()); + Long b = (Long) output.get("B"); + Assert.assertNotNull(b); + Assert.assertEquals(Long.MAX_VALUE, b.longValue()); + String c = (String) output.get("C"); + Assert.assertNotNull(c); + Assert.assertEquals("String", c); + BigDecimal d = (BigDecimal) output.get("D"); + Assert.assertNotNull(d); + Assert.assertEquals(new BigDecimal("12345.12E23"), d); + Calendar e = (Calendar) output.get("E"); + Assert.assertNotNull(e); + Assert.assertEquals(cal, e); + Assert.assertEquals(cal.getTimeInMillis(), e.getTimeInMillis()); + Assert.assertEquals(cal.getTimeZone(), e.getTimeZone()); + Double f = (Double) output.get("F"); + Assert.assertNotNull(f); + Assert.assertEquals((double)0.1, f.doubleValue(),0.0); + Boolean g = (Boolean) output.get("G"); + Assert.assertNotNull(g); + Assert.assertTrue(g.booleanValue()); + Boolean h = (Boolean) output.get("H"); + Assert.assertNotNull(h); + Assert.assertFalse(h.booleanValue()); + Object j = output.get("J"); + Assert.assertNull(j); + + + + } + + @Test + public void testWriteTypesWrongFamily() throws IOException { + Map map = Maps.newHashMap(); + map.put("A", 1); + map.put("B", Long.MAX_VALUE); + map.put("C", "String"); + map.put("D", new BigDecimal("12345.12E23")); + Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("BST")); + cal.setTimeInMillis(System.currentTimeMillis()); + map.put("E", cal); + map.put("F", (double)0.1); + map.put("G", true); + map.put("H", false); + map.put("J", null); + InputStream in = Types.storeMapToStream("testkey", map, 
"testcf"); + Map output = Maps.newHashMap(); + try { + Types.loadFromStream("testkey", output, in, "not-testcf"); + org.junit.Assert.fail(); + } catch ( IOException e ) { + // Ok + } + } + + @Test + public void testWriteArrayTypes() throws IOException { + Map map = Maps.newHashMap(); + map.put("A", new Integer[]{1,2}); + map.put("B", new Long[]{Long.MIN_VALUE,Long.MAX_VALUE}); + map.put("C", new String[]{"StringA","StringB"}); + map.put("D", new BigDecimal[]{new BigDecimal("12345.12E23"),new BigDecimal("12345.12E21")}); + Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("BST")); + cal.setTimeInMillis(System.currentTimeMillis()); + Calendar cal2 = Calendar.getInstance(TimeZone.getTimeZone("PST")); + cal2.setTimeInMillis(System.currentTimeMillis()); + map.put("E", new Calendar[]{cal,cal2}); + map.put("F", new Double[]{0.1,0.2}); + map.put("G", new Boolean[]{true,false}); + map.put("H", new Boolean[]{false,true}); + map.put("I", new boolean[]{true, true}); + map.put("J", new long[]{5, 10}); + map.put("K", new int[] {1,2}); + map.put("L", new double[] {1.1, 3.14}); + + InputStream in = Types.storeMapToStream("testkey", map, "testcf"); + Map output = Maps.newHashMap(); + Types.loadFromStream("testkey", output, in, "testcf"); + + Integer[] a = (Integer[]) output.get("A"); + Assert.assertNotNull(a); + Assert.assertEquals(2, a.length); + Assert.assertEquals(1, a[0].intValue()); + Assert.assertEquals(2, a[1].intValue()); + Long[] b = (Long[]) output.get("B"); + Assert.assertNotNull(b); + Assert.assertEquals(2, b.length); + Assert.assertEquals(Long.MIN_VALUE, b[0].longValue()); + Assert.assertEquals(Long.MAX_VALUE, b[1].longValue()); + String[] c = (String[]) output.get("C"); + Assert.assertNotNull(c); + Assert.assertEquals(2, c.length); + Assert.assertEquals("StringA", c[0]); + Assert.assertEquals("StringB", c[1]); + BigDecimal[] d = (BigDecimal[]) output.get("D"); + Assert.assertNotNull(d); + Assert.assertEquals(2, d.length); + Assert.assertEquals(new 
BigDecimal("12345.12E23"), d[0]); + Assert.assertEquals(new BigDecimal("12345.12E21"), d[1]); + Calendar[] e = (Calendar[]) output.get("E"); + Assert.assertNotNull(e); + Assert.assertEquals(2, e.length); + Assert.assertEquals(cal, e[0]); + Assert.assertEquals(cal.getTimeInMillis(), e[0].getTimeInMillis()); + Assert.assertEquals(cal.getTimeZone(), e[0].getTimeZone()); + Assert.assertEquals(cal2, e[1]); + Assert.assertEquals(cal2.getTimeInMillis(), e[1].getTimeInMillis()); + Assert.assertEquals(cal2.getTimeZone(), e[1].getTimeZone()); + Double[] f = (Double[]) output.get("F"); + Assert.assertNotNull(f); + Assert.assertEquals(2, f.length); + Assert.assertEquals(0.1, f[0],0.0); + Assert.assertEquals(0.2, f[1],0.0); + Boolean[] g = (Boolean[]) output.get("G"); + Assert.assertNotNull(g); + Assert.assertEquals(2, g.length); + Assert.assertTrue(g[0]); + Assert.assertFalse(g[1]); + Boolean[] h = (Boolean[]) output.get("H"); + Assert.assertNotNull(h); + Assert.assertEquals(2, h.length); + Assert.assertFalse(h[0]); + Assert.assertTrue(h[1]); + Boolean[] i = (Boolean[]) output.get("I"); + Assert.assertNotNull(i); + Assert.assertEquals(true, i[0]); + Assert.assertEquals(true, i[1]); + Long[] j = (Long[]) output.get("J"); + Assert.assertEquals(5, j[0].longValue()); + Assert.assertEquals(10, j[1].longValue()); + Integer[] k = (Integer[]) output.get("K"); + Assert.assertEquals(1, k[0].intValue()); + Assert.assertEquals(2, k[1].intValue()); + Double[] l = (Double[]) output.get("L"); + Assert.assertEquals(1.1, l[0], 0.01); + Assert.assertEquals(3.14, l[1], 0.01); + } + @Test + public void testWriteEmptyArrayTypes() throws IOException { + Map map = Maps.newHashMap(); + map.put("A", new Integer[]{}); + map.put("B", new Long[]{}); + map.put("C", new String[]{}); + map.put("D", new BigDecimal[]{}); + map.put("E", new Calendar[]{}); + map.put("F", new Double[]{}); + map.put("G", new Boolean[]{}); + map.put("H", new Boolean[]{}); + + InputStream in = Types.storeMapToStream("testkey", 
map,"testcf"); + Map output = Maps.newHashMap(); + Types.loadFromStream("testkey", output, in, "testcf"); + + Integer[] a = (Integer[]) output.get("A"); + Assert.assertNotNull(a); + Assert.assertEquals(0, a.length); + Long[] b = (Long[]) output.get("B"); + Assert.assertNotNull(b); + Assert.assertEquals(0, b.length); + String[] c = (String[]) output.get("C"); + Assert.assertNotNull(c); + Assert.assertEquals(0, c.length); + BigDecimal[] d = (BigDecimal[]) output.get("D"); + Assert.assertNotNull(d); + Assert.assertEquals(0, d.length); + Calendar[] e = (Calendar[]) output.get("E"); + Assert.assertNotNull(e); + Assert.assertEquals(0, e.length); + Double[] f = (Double[]) output.get("F"); + Assert.assertNotNull(f); + Assert.assertEquals(0, f.length); + Boolean[] g = (Boolean[]) output.get("G"); + Assert.assertNotNull(g); + Assert.assertEquals(0, g.length); + Boolean[] h = (Boolean[]) output.get("H"); + Assert.assertNotNull(h); + Assert.assertEquals(0, h.length); + + + + } + + + @Test + public void testStringToLongString() throws IOException { + Map map = Maps.newHashMap(); + map.put("short", SHORTSTRING); + map.put("long", LONGSTRING); + map.put("shortarray", new String[]{ SHORTSTRING, SHORTSTRING, SHORTSTRING}); + map.put("longarray", new String[]{ SHORTSTRING, SHORTSTRING, LONGSTRING, LONGSTRING}); + + StringType.setLengthLimit(LONGSTRING.length() -1); + + InputStream in = Types.storeMapToStream("testkey", map,"testcf"); + Map output = Maps.newHashMap(); + Types.loadFromStream("testkey", output, in, "testcf"); + + Assert.assertEquals(String.class, output.get("short").getClass()); + Assert.assertEquals(LongString.class, output.get("long").getClass()); + Assert.assertEquals(String[].class, output.get("shortarray").getClass()); + Assert.assertEquals(LongString[].class, output.get("longarray").getClass()); + + Assert.assertEquals(SHORTSTRING, output.get("short")); + Assert.assertEquals(LONGSTRING, ((LongString)output.get("long")).toString()); + Assert.assertArrayEquals(new 
String[]{SHORTSTRING, SHORTSTRING, SHORTSTRING}, (String[])output.get("shortarray")); + LongString[] longStringArray = (LongString[]) output.get("longarray"); + Assert.assertEquals(longStringArray.length, 4); + Assert.assertEquals(SHORTSTRING, longStringArray[0].toString()); + Assert.assertEquals(SHORTSTRING, longStringArray[1].toString()); + Assert.assertEquals(LONGSTRING, longStringArray[2].toString()); + Assert.assertEquals(LONGSTRING, longStringArray[3].toString()); + + + StringType.setLengthLimit(0); + + } + +} diff --git a/core/src/test/resources/testsharedoverride.properties b/core/src/test/resources/testsharedoverride.properties new file mode 100644 index 00000000..639a4e1b --- /dev/null +++ b/core/src/test/resources/testsharedoverride.properties @@ -0,0 +1,3 @@ + + +index-column-names = somethingElseFromProperties \ No newline at end of file diff --git a/drivers/README.textile b/drivers/README.textile new file mode 100644 index 00000000..a0709f2b --- /dev/null +++ b/drivers/README.textile @@ -0,0 +1,17 @@ +h1. Map Content System Drivers. + +This subtree contains Storage SPI implementations that are needed to run SparseMap. They are OSGi Fragments and one of them must be included in the OSGi Container when sparsemap is started. + +derby/, mysql/, postgres/, oracle/ + + These projects contain JDBC based drivers for each database. The Fragment contains a configuration + for the database in question, and service and the JDBC driver. The code for the driver is contained in the core bundle + +cassandra/ + The bundle contains an implementation of the Cassandra SPI implementation and the necessary dependencies to connect to an existing Cassandra cluster. + +mongo/ + The bundle contains an implementation of the MongoDB SPI implementation and the necessary dependencies to connect to an existing MongoDB cluster. + +hbase/ + The bundle contains an implementation of the HBase SPI implementation and the necessary dependencies to connect to an existing HBase cluster. 
diff --git a/drivers/cassandra/pom.xml b/drivers/cassandra/pom.xml new file mode 100644 index 00000000..ec739deb --- /dev/null +++ b/drivers/cassandra/pom.xml @@ -0,0 +1,98 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 4 + parent/pom.xml + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.cassandra-driver + bundle + 0.1-SNAPSHOT + Sakai Nakamura :: Cassandra SPI Implementation + Storage SPI implementation using Cassandra + + scm:git:git://github.com/sakaiproject/sparsemapcontent.git + scm:git:git@github.com:sakaiproject/sparsemapcontent.git + http://github.com/sakaiproject/sparsemapcontent/ + + + UTF-8 + + + + + org.apache.felix + maven-bundle-plugin + true + + + driver + !* + !* + + org.sakaiproject.nakamura.core + org.sakaiproject.nakamura.lite.storage.cassandra.* + + + + + + + + + + org.apache.cassandra.deps + libthrift + 0.5.0 + + + org.apache.cassandra.thrift + apache-cassandra + 0.6.5 + + + org.apache.felix + org.apache.felix.scr.annotations + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + tests + 1.5.1-SNAPSHOT + test + + + org.slf4j + slf4j-api + 1.5.10 + + + org.slf4j + slf4j-simple + 1.5.10 + test + + + junit + junit + 4.4 + test + + + findbugs + annotations + 1.0.0 + provided + + + + diff --git a/drivers/cassandra/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClient.java b/drivers/cassandra/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClient.java new file mode 100644 index 00000000..b302e7cc --- /dev/null +++ b/drivers/cassandra/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClient.java @@ -0,0 +1,509 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.cassandra; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +import org.apache.cassandra.thrift.Cassandra.Client; +import org.apache.cassandra.thrift.Column; +import org.apache.cassandra.thrift.ColumnOrSuperColumn; +import org.apache.cassandra.thrift.ColumnParent; +import org.apache.cassandra.thrift.ColumnPath; +import org.apache.cassandra.thrift.ConsistencyLevel; +import org.apache.cassandra.thrift.Deletion; +import org.apache.cassandra.thrift.InvalidRequestException; +import org.apache.cassandra.thrift.Mutation; +import org.apache.cassandra.thrift.SlicePredicate; +import org.apache.cassandra.thrift.SliceRange; +import org.apache.cassandra.thrift.TimedOutException; +import org.apache.cassandra.thrift.UnavailableException; +import org.apache.thrift.TException; +import org.apache.thrift.protocol.TProtocol; +import org.apache.thrift.transport.TSocket; +import org.apache.thrift.transport.TTransportException; +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import 
org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.sakaiproject.nakamura.lite.storage.spi.Disposable; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.Disposer; +import org.sakaiproject.nakamura.lite.storage.spi.SparseRow; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientListener; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockSetContentHelper; +import org.sakaiproject.nakamura.lite.storage.spi.types.Types; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Map.Entry; + +public class CassandraClient extends Client implements StorageClient { + + private static final Logger LOGGER = LoggerFactory.getLogger(CassandraClient.class); + public static final String CONFIG_BLOCK_SIZE = "block-size"; + public static final String CONFIG_MAX_CHUNKS_PER_BLOCK = "chunks-per-block"; + + private static final int DEFAULT_BLOCK_SIZE = 1024 * 1024; + private static final int DEFAULT_MAX_CHUNKS_PER_BLOCK = 64; + private static final String INDEX_COLUMN_FAMILY = "smcindex"; + + + private TSocket tSocket; + private BlockContentHelper contentHelper; + private int blockSize; + private int maxChunksPerBlockSet; + private CassandraClientPool pool; + + private Set indexColumns; + + private boolean active; + private List toDispose = Lists.newArrayList(); + public List> tResultRows; + + + public CassandraClient(CassandraClientPool pool, TProtocol 
tProtocol, TSocket tSocket, + Map properties, Set indexColums) { + super(tProtocol); + this.indexColumns = indexColums; + this.tSocket = tSocket; + this.pool = pool; + contentHelper = new BlockSetContentHelper(this); + blockSize = StorageClientUtils.getSetting(properties.get(CONFIG_BLOCK_SIZE), + DEFAULT_BLOCK_SIZE); + maxChunksPerBlockSet = StorageClientUtils.getSetting( + properties.get(CONFIG_MAX_CHUNKS_PER_BLOCK), DEFAULT_MAX_CHUNKS_PER_BLOCK); + } + + public void close() { + pool.releaseClient(this); + } + + public void destroy() { + try { + if (tSocket.isOpen()) { + tSocket.flush(); + tSocket.close(); + } + } catch (TTransportException e) { + LOGGER.error("Failed to close the connection to the cassandra store.", e); + } + } + + public void passivate() { + } + + public void activate() { + } + + public void validate() throws TException { + describe_version(); + } + + public Map get(String keySpace, String columnFamily, String key) + throws StorageClientException { + Map row = new HashMap(); + try { + SlicePredicate predicate = new SlicePredicate(); + SliceRange sliceRange = new SliceRange(); + sliceRange.setStart(new byte[0]); + sliceRange.setFinish(new byte[0]); + predicate.setSlice_range(sliceRange); + + ColumnParent parent = new ColumnParent(columnFamily); + List results = get_slice(keySpace, key, parent, predicate,ConsistencyLevel.ONE); + + for (ColumnOrSuperColumn result : results) { + if (result.isSetSuper_column()) { + Map sc = new HashMap(); + + for (Column column : result.super_column.columns) { + Object columnValue = Types.toObject(column.value); + sc.put(new String(column.name, "UTF-8"), columnValue); + } + row.put(new String(result.super_column.name, "UTF-8"), sc); + } else { + row.put(new String(result.column.name, "UTF-8"), + Types.toObject(result.column.value)); + } + } + + } catch (InvalidRequestException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (UnavailableException e) { + throw new 
StorageClientException(e.getMessage(), e); + } catch (TimedOutException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (TException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (IOException e) { + LOGGER.debug(e.getMessage()); + } + return row; + } + + public void insert(String keySpace, String columnFamily, String key, Map values, boolean probablyNew) + throws StorageClientException { + try { + Map>> mutation = new HashMap>>(); + Map> columnMutations = new HashMap>(); + LOGGER.debug("Saving changes to {}:{}:{} ", + new Object[] { keySpace, columnFamily, key }); + List keyMutations = Lists.newArrayList(); + columnMutations.put(columnFamily, keyMutations); + mutation.put(key, columnMutations); + // TODO We need to handle deletions correctly. + // If the deleted flag is set we should probably just delete completely. + for (Entry value : values.entrySet()) { + String name = value.getKey(); + byte[] bname=null; + try { + bname = name.getBytes("UTF-8"); + } catch (UnsupportedEncodingException e1) { + LOGGER.debug(e1.getMessage()); + } + Object v = value.getValue(); + if (v instanceof RemoveProperty) { + Deletion deletion = new Deletion(); + SlicePredicate deletionPredicate = new SlicePredicate(); + deletionPredicate.addToColumn_names(bname); + deletion.setPredicate(deletionPredicate); + Mutation mu = new Mutation(); + mu.setDeletion(deletion); + keyMutations.add(mu); + } + else { + try{ + byte b[]=Types.toByteArray(v); + Column column = new Column(bname, b, System.currentTimeMillis()); + ColumnOrSuperColumn csc = new ColumnOrSuperColumn(); + csc.setColumn(column); + Mutation mu = new Mutation(); + mu.setColumn_or_supercolumn(csc); + keyMutations.add(mu); + + if((!columnFamily.equals(INDEX_COLUMN_FAMILY))&&shouldIndex(keySpace, columnFamily, name)) { + addIndex(keySpace,columnFamily,key,bname,b); + } + + } + catch(IOException e) + { + LOGGER.debug("IOException. 
Stack trace:",e); + } + } + } + LOGGER.debug("Mutation {} ", mutation); + batch_mutate(keySpace, mutation, ConsistencyLevel.ONE); + } catch (InvalidRequestException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (UnavailableException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (TimedOutException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (TException e) { + throw new StorageClientException(e.getMessage(), e); + } + } + + public void remove(String keySpace, String columnFamily, String key) + throws StorageClientException { + if(!columnFamily.equals(INDEX_COLUMN_FAMILY)){ + Map indexRow = null; + Map row = get(keySpace, columnFamily, key); + + for (Entry value : row.entrySet()) { + try { + String columnname = value.getKey(); + String columnvalue = null; + columnvalue = new String(Types.toByteArray(value.getValue())); + columnvalue=StorageClientUtils.insecureHash(columnvalue); + String indexKey=columnname+":"+INDEX_COLUMN_FAMILY+":"+columnvalue; + indexRow=get(keySpace,INDEX_COLUMN_FAMILY,indexKey); + indexRow.remove(key); + remove(keySpace,INDEX_COLUMN_FAMILY,indexKey); + insert(keySpace,INDEX_COLUMN_FAMILY,indexKey,indexRow,true); + } catch (IOException e) { + LOGGER.debug("IOException. 
",e); + } + } + } + + ColumnPath cp = new ColumnPath(columnFamily); + try { + remove(keySpace, key, cp, System.currentTimeMillis(), ConsistencyLevel.ONE); + } catch (InvalidRequestException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (UnavailableException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (TimedOutException e) { + throw new StorageClientException(e.getMessage(), e); + } catch (TException e) { + throw new StorageClientException(e.getMessage(), e); + } + } + + public Map streamBodyIn(String keySpace, String contentColumnFamily, + String contentId, String contentBlockId, String streamId, Map content, InputStream in) + throws StorageClientException, AccessDeniedException, IOException { + return contentHelper.writeBody(keySpace, contentColumnFamily, contentId, contentBlockId, streamId, + blockSize, maxChunksPerBlockSet, in); + } + + public InputStream streamBodyOut(String keySpace, String contentColumnFamily, String contentId, + String contentBlockId, String streamId, Map content) throws StorageClientException, + AccessDeniedException { + + int nBlocks = StorageClientUtils.toInt(content.get(Content.NBLOCKS_FIELD)); + return contentHelper.readBody(keySpace, contentColumnFamily, contentBlockId, streamId, nBlocks); + } + + public DisposableIterator> find(String keySpace, + String authorizableColumnFamily, Map properties, DirectCacheAccess cachingManager) + throws StorageClientException { + final String fKeyspace = keySpace; + final String fAuthorizableColumnFamily = authorizableColumnFamily; + List> andTerms = new ArrayList>(); + + for (Entry e : properties.entrySet()) { + String k = e.getKey(); + Object v = e.getValue(); + + if (shouldIndex(keySpace, authorizableColumnFamily, k) || (v instanceof Map)) { + if (v != null) { + if (v instanceof Map) { + List> orTerms = new ArrayList>(); + Set orResultSet = new HashSet(); + + @SuppressWarnings("unchecked") + Set> subterms = ((Map) v).entrySet(); + + for (Iterator> 
subtermsIter = subterms.iterator(); subtermsIter + .hasNext();) { + Entry subterm = subtermsIter.next(); + String subk = subterm.getKey(); + Object subv = subterm.getValue(); + if (shouldIndex(keySpace, authorizableColumnFamily, subk)) { + try { + Set or = new HashSet(); + String indexKey = new String(subk.getBytes("UTF-8")) + + ":" + + authorizableColumnFamily + + ":" + + StorageClientUtils.insecureHash(new String(Types + .toByteArray(subv))); + Map tempRow = get(keySpace, INDEX_COLUMN_FAMILY, indexKey); + for (Entry tempRows : tempRow.entrySet()) { + or.add(tempRows.getKey()); + } + orTerms.add(or); + } catch (IOException e1) { + LOGGER.warn("IOException {}", e1.getMessage()); + } + } + } + + if (!orTerms.isEmpty()) + orResultSet = orTerms.get(0); + + for (int i = 0; i < orTerms.size(); i++) { + orResultSet = Sets.union(orResultSet, orTerms.get(i)); + + } + andTerms.add(orResultSet); + } else { + try { + Set and = new HashSet(); + String indexKey = new String(k.getBytes("UTF-8")) + ":" + authorizableColumnFamily + + ":" + + StorageClientUtils.insecureHash(new String(Types.toByteArray(v))); + Map tempRow = get(keySpace, INDEX_COLUMN_FAMILY, indexKey); + for (Entry tempRows : tempRow.entrySet()) { + and.add(tempRows.getKey()); + } + andTerms.add(and); + } catch (IOException e1) { + LOGGER.warn("IOException {}", e1.getMessage()); + } + } + } + } + } + + Set andResultSet = new HashSet(); + + if (!andTerms.isEmpty()) + andResultSet = andTerms.get(0); + + for (int i = 0; i < andTerms.size(); i++) { + andResultSet = Sets.intersection(andResultSet, andTerms.get(i)); + } + + List> resultRows = new ArrayList>(); + + Iterator iterator = andResultSet.iterator(); + + while (iterator.hasNext()) { + Map row = get(keySpace, authorizableColumnFamily, iterator.next()); + resultRows.add(row); + } + + tResultRows = resultRows; + final Iterator fIterator = andResultSet.iterator(); + + if (tResultRows.isEmpty()) { + return new DisposableIterator>() { + + private Disposer disposer; + + 
public boolean hasNext() { + return false; + } + + public Map next() { + return null; + } + + public void remove() { + } + + public void close() { + if (disposer != null) { + disposer.unregisterDisposable(this); + } + } + + public void setDisposer(Disposer disposer) { + this.disposer = disposer; + } + }; + } + return registerDisposable(new PreemptiveIterator>() { + + private Map nextValue = Maps.newHashMap(); + private boolean open = true; + + protected Map internalNext() { + return nextValue; + } + + protected boolean internalHasNext() { + if (fIterator.hasNext()) { + try { + String id = fIterator.next(); + nextValue = get(fKeyspace, fAuthorizableColumnFamily, id); + LOGGER.debug("Got Row ID {} {} ", id, nextValue); + return true; + } catch (StorageClientException e) { + + } + } + close(); + nextValue = null; + LOGGER.debug("End of Set "); + return false; + } + + @Override + public void close() { + if (open) { + open = false; + } + + } + }); + } + + public DisposableIterator> listChildren(String keySpace, + String columnFamily, String key, DirectCacheAccess cachingManager) throws StorageClientException { + throw new UnsupportedOperationException(); + } + + public boolean hasBody(Map content, String streamId) { + return contentHelper.hasBody(content, streamId); + } + + private void addIndex(String keySpace, String columnFamily, String key, byte[] bname, byte[] b) + throws StorageClientException { + String indexKey = new String(bname) + ":" + columnFamily + ":" + StorageClientUtils.insecureHash(b); + Map values = new HashMap(); + values.put(key, (Object) "Whatever value of index"); + insert(keySpace, INDEX_COLUMN_FAMILY, indexKey, values, true); +} + + + private boolean shouldIndex(String keySpace, String columnFamily, String columnName) + throws StorageClientException { + if (indexColumns.contains(columnFamily + ":" + columnName)) { + LOGGER.debug("Should Index {}:{}", columnFamily, columnName); + return true; + } else { + LOGGER.debug("Should Not Index {}:{}", 
columnFamily, columnName); + return false; + } + } + + private T registerDisposable(T disposable) { + toDispose.add(disposable); + return disposable; + } + public void shutdownConnection() { + if (active) { + disposeDisposables(); + active = false; + } + } + + private void disposeDisposables() { + for (Disposable d : toDispose) { + d.close(); + } + } + + public DisposableIterator listAll(String keySpace, String columnFamily) { + // TODO Auto-generated method stub + return null; + } + + public long allCount(String keySpace, String columnFamily) { + // TODO Auto-generated method stub + return 0; + } + + public void setStorageClientListener(StorageClientListener storageClientListener) { + // TODO Auto-generated method stub + + } + +} \ No newline at end of file diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClientPool.java b/drivers/cassandra/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClientPool.java similarity index 73% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClientPool.java rename to drivers/cassandra/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClientPool.java index 0bb74685..c448145e 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClientPool.java +++ b/drivers/cassandra/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClientPool.java @@ -17,6 +17,8 @@ */ package org.sakaiproject.nakamura.lite.storage.cassandra; +import com.google.common.collect.ImmutableMap; + import org.apache.commons.lang.StringUtils; import org.apache.commons.pool.BasePoolableObjectFactory; import org.apache.commons.pool.PoolableObjectFactory; @@ -32,19 +34,21 @@ import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.TSocket; +import org.sakaiproject.nakamura.api.lite.BaseColumnFamilyCacheManager; import 
org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; import org.sakaiproject.nakamura.api.lite.StorageCacheManager; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.lite.storage.AbstractClientConnectionPool; -import org.sakaiproject.nakamura.lite.storage.ConcurrentLRUMap; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.AbstractClientConnectionPool; +import org.sakaiproject.nakamura.lite.storage.spi.ConcurrentLRUMap; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Map; -@Component(enabled = false, metatype = true, inherit = true) +@Component(enabled = true, metatype = true, inherit = true) @Service(value = StorageClientPool.class) public class CassandraClientPool extends AbstractClientConnectionPool { @@ -54,9 +58,12 @@ public class CassandraClientPool extends AbstractClientConnectionPool { @Reference(cardinality=ReferenceCardinality.OPTIONAL_UNARY, policy=ReferencePolicy.DYNAMIC) private StorageCacheManager storageManagerCache; + public static final String PROPERTIES_KEYSPACE="n"; + public static final String INDEX_COLUMN_FAMILY="smcindex"; + private static final String ROW_OF_PROPERTIES="default"; + private static final String PROPERTIES_INDEX_COLUMN_NAME="validIndex"; - - public static class ClientConnectionPoolFactory extends BasePoolableObjectFactory { + public class ClientConnectionPoolFactory extends BasePoolableObjectFactory { private String[] hosts; private int[] ports; @@ -76,7 +83,8 @@ public ClientConnectionPoolFactory(CassandraClientPool pool, String[] connection hosts[i] = spec[0]; ports[i] = Integer.parseInt(spec[1]); i++; - } + } + } @Override @@ -123,7 +131,8 @@ public Object makeObject() throws Exception { 
LOGGER.debug("Opened Connection {} isOpen {} Host {} Port {}", tSocket, tSocket.isOpen()); CassandraClient clientConnection = new CassandraClient(pool, tProtocol, tSocket, - properties); + properties, getIndexColumns()); + return clientConnection; } @@ -171,28 +180,48 @@ public CassandraClientPool() { @Activate public void activate(Map properties) throws ClassNotFoundException { - connections = StorageClientUtils.getSetting(properties.get(CONNECTION_POOL), - new String[] { "localhost:9160" }); - this.properties = properties; - super.activate(properties); - // this should come from the memory service ultimately. - sharedCache = new ConcurrentLRUMap(10000); - defaultStorageManagerCache = new StorageCacheManager() { - - public Map getContentCache() { - return sharedCache; - } - - public Map getAuthorizableCache() { - return sharedCache; - } - - public Map getAccessControlCache() { - return sharedCache; - } - }; + connections = StorageClientUtils.getSetting(properties.get(CONNECTION_POOL), + new String[] { "localhost:9160" }); + this.properties = properties; + super.activate(properties); + // this should come from the memory service ultimately. + + CassandraClient client = null; + try { + client = (CassandraClient) getClient(); + if (client == null) { + LOGGER.warn("No connection"); + } + else{ + // Check if properties map is already stored in the database. + Map cacheProperties = client.get(PROPERTIES_KEYSPACE,INDEX_COLUMN_FAMILY, ROW_OF_PROPERTIES); + + // If not stored, store default values. + if (cacheProperties == null) { + // FIXME: This needs to be checked, and what about the types ? 
+ client.insert(PROPERTIES_KEYSPACE, INDEX_COLUMN_FAMILY, ROW_OF_PROPERTIES, ImmutableMap.of(PROPERTIES_INDEX_COLUMN_NAME, (Object)StringUtils.join(getIndexColumns(),";")),true); + } + } + } catch (ClientPoolException e) { + LOGGER.error("Failed to check Schema", e); + } catch(StorageClientException e){ + LOGGER.error("Storage client exception",e); + } + finally { + if (client != null) { + client.close(); + } + } + + + sharedCache = new ConcurrentLRUMap(10000); + defaultStorageManagerCache = new BaseColumnFamilyCacheManager() { + public Map getCache(String columnFamily) { + return sharedCache; + } + }; - } + } @Deactivate public void deactivate(Map properties) { diff --git a/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AccessControlManagerImplMan.java b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AccessControlManagerImplTest.java similarity index 73% rename from src/test/java/org/sakaiproject/nakamura/lite/cassandra/AccessControlManagerImplMan.java rename to drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AccessControlManagerImplTest.java index 69fb7dfc..7f0b1e04 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AccessControlManagerImplMan.java +++ b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AccessControlManagerImplTest.java @@ -19,18 +19,20 @@ import com.google.common.collect.ImmutableMap; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.accesscontrol.AbstractAccessControlManagerImplTest; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.cassandra.CassandraClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; -public class AccessControlManagerImplMan extends 
AbstractAccessControlManagerImplTest { +public class AccessControlManagerImplTest extends AbstractAccessControlManagerImplTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { CassandraClientPool cp = new CassandraClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AuthorizableManagerImplMan.java b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AuthorizableManagerImplTest.java similarity index 73% rename from src/test/java/org/sakaiproject/nakamura/lite/cassandra/AuthorizableManagerImplMan.java rename to drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AuthorizableManagerImplTest.java index 7ec9c8eb..66f6398a 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AuthorizableManagerImplMan.java +++ b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/AuthorizableManagerImplTest.java @@ -19,18 +19,20 @@ import com.google.common.collect.ImmutableMap; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.authorizable.AbstractAuthorizableManagerImplTest; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.cassandra.CassandraClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; -public class AuthorizableManagerImplMan extends AbstractAuthorizableManagerImplTest { +public class AuthorizableManagerImplTest extends 
AbstractAuthorizableManagerImplTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { CassandraClientPool cp = new CassandraClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerFinderImplTest.java b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerFinderImplTest.java new file mode 100644 index 00000000..b26e6015 --- /dev/null +++ b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerFinderImplTest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.cassandra; + +import com.google.common.collect.ImmutableMap; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerFinderTest; +import org.sakaiproject.nakamura.lite.storage.cassandra.CassandraClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +public class ContentManagerFinderImplTest extends AbstractContentManagerFinderTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + CassandraClientPool cp = new CassandraClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); + return cp; + } + +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerImplMan.java b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerImplTest.java similarity index 73% rename from src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerImplMan.java rename to drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerImplTest.java index 9b1c345f..04f9b17c 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerImplMan.java +++ b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/ContentManagerImplTest.java @@ -19,18 +19,20 @@ import com.google.common.collect.ImmutableMap; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.content.AbstractContentManagerTest; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.cassandra.CassandraClientPool; +import 
org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; -public class ContentManagerImplMan extends AbstractContentManagerTest { +public class ContentManagerImplTest extends AbstractContentManagerTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { CassandraClientPool cp = new CassandraClientPool(); cp.activate(ImmutableMap.of("test", (Object) "test", - BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9)); + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), configuration)); return cp; } diff --git a/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/LockManagerImplTest.java b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/LockManagerImplTest.java new file mode 100644 index 00000000..9d44284d --- /dev/null +++ b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/cassandra/LockManagerImplTest.java @@ -0,0 +1,22 @@ +package org.sakaiproject.nakamura.lite.cassandra; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.lock.AbstractLockManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.cassandra.CassandraClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +import com.google.common.collect.ImmutableMap; + +public class LockManagerImplTest extends AbstractLockManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + CassandraClientPool cp = new CassandraClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, + Configuration.class.getName(), 
configuration)); + return cp; + } + +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsSoak.java b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsSoak.java similarity index 68% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsSoak.java rename to drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsSoak.java index da1004ed..873bdddf 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsSoak.java +++ b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsSoak.java @@ -18,34 +18,42 @@ package org.sakaiproject.nakamura.lite.soak.cassandra; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.cassandra.CassandraClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +import java.io.IOException; +import java.util.Map; public class CreateUsersAndGroupsSoak extends AbstractSoakController { private int totalUsers; private StorageClientPool connectionPool; + private Configuration configuration; - public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool) { + public 
CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool, Configuration configuration) { super(totalUsers); this.connectionPool = connectionPool; + this.configuration = configuration; this.totalUsers = totalUsers; } protected Runnable getRunnable(int nthreads) throws ClientPoolException, StorageClientException, AccessDeniedException { int usersPerThread = totalUsers / nthreads; - return new CreateUsersAndGroupsClient(usersPerThread, connectionPool); + return new CreateUsersAndGroupsClient(usersPerThread, connectionPool, configuration); } public static void main(String[] argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { int totalUsers = 1000; int nthreads = 10; @@ -56,15 +64,23 @@ public static void main(String[] argv) throws ClientPoolException, StorageClient if (argv.length > 1) { totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers); } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); CreateUsersAndGroupsSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsSoak( - totalUsers, getConnectionPool()); + totalUsers, getConnectionPool(configuration), configuration); createUsersAndGroupsSoak.launchSoak(nthreads); } - protected static StorageClientPool getConnectionPool() throws ClassNotFoundException { + protected static StorageClientPool getConnectionPool(Configuration configuration) throws ClassNotFoundException { CassandraClientPool cp = new CassandraClientPool(); - cp.activate(ImmutableMap.of("test", (Object) "test")); + cp.activate(ImmutableMap.of("test", (Object) "test", + Configuration.class.getName(), configuration)); return cp; } 
diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsWithMembersSoak.java b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsWithMembersSoak.java similarity index 70% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsWithMembersSoak.java rename to drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsWithMembersSoak.java index 9b1f8188..3db0c8a8 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsWithMembersSoak.java +++ b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/CreateUsersAndGroupsWithMembersSoak.java @@ -18,26 +18,34 @@ package org.sakaiproject.nakamura.lite.soak.cassandra; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsWithMembersClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; import org.sakaiproject.nakamura.lite.storage.cassandra.CassandraClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +import java.io.IOException; +import java.util.Map; public class CreateUsersAndGroupsWithMembersSoak extends AbstractSoakController { private int totalUsers; private StorageClientPool connectionPool; private int totalGroups; + private Configuration configuration; public CreateUsersAndGroupsWithMembersSoak(int 
totalUsers, int totalGroups, - StorageClientPool connectionPool) { + StorageClientPool connectionPool, Configuration configuration) { super(totalUsers + (totalGroups * 5)); this.connectionPool = connectionPool; + this.configuration = configuration; this.totalUsers = totalUsers; this.totalGroups = totalGroups; } @@ -47,11 +55,11 @@ protected Runnable getRunnable(int nthreads) throws ClientPoolException, int usersPerThread = totalUsers / nthreads; int groupsPerThread = totalGroups / nthreads; return new CreateUsersAndGroupsWithMembersClient(usersPerThread, groupsPerThread, - connectionPool); + connectionPool, configuration); } public static void main(String[] argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { int totalUsers = 1000; int totalGroups = 100; @@ -66,15 +74,23 @@ public static void main(String[] argv) throws ClientPoolException, StorageClient if (argv.length > 2) { totalGroups = StorageClientUtils.getSetting(Integer.valueOf(argv[2]), totalUsers); } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); CreateUsersAndGroupsWithMembersSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsWithMembersSoak( - totalUsers, totalGroups, getConnectionPool()); + totalUsers, totalGroups, getConnectionPool(configuration), configuration); createUsersAndGroupsSoak.launchSoak(nthreads); } - protected static StorageClientPool getConnectionPool() throws ClassNotFoundException { + protected static StorageClientPool getConnectionPool(Configuration configuration) throws ClassNotFoundException { CassandraClientPool cp = new CassandraClientPool(); - cp.activate(ImmutableMap.of("test", (Object) 
"test")); + cp.activate(ImmutableMap.of("test", (Object) "test", + Configuration.class.getName(), configuration)); return cp; } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/SoakAll.java b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/SoakAll.java similarity index 92% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/SoakAll.java rename to drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/SoakAll.java index b136e3cf..7e6ee028 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/SoakAll.java +++ b/drivers/cassandra/src/test/java/org/sakaiproject/nakamura/lite/soak/cassandra/SoakAll.java @@ -21,10 +21,12 @@ import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import java.io.IOException; + public class SoakAll { public static void main(String[] argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { CreateUsersAndGroupsSoak.main(argv); CreateUsersAndGroupsWithMembersSoak.main(argv); } diff --git a/drivers/hbase/pom.xml b/drivers/hbase/pom.xml new file mode 100644 index 00000000..31150763 --- /dev/null +++ b/drivers/hbase/pom.xml @@ -0,0 +1,98 @@ + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.hbase-driver + bundle + 0.1-SNAPSHOT + Sparse Map :: HBase SPI Implementation + Storage SPI implementation using HBase + + scm:git:git://github.com/sakaiproject/sparsemapcontent.git + scm:git:git@github.com:sakaiproject/sparsemapcontent.git + http://github.com/sakaiproject/sparsemapcontent/ + + + UTF-8 + + + + + org.apache.felix + maven-bundle-plugin + true + + + driver + !* + !* + + org.sakaiproject.nakamura.core + 
org.sakaiproject.nakamura.lite.storage.hbase.* + + + + + + + + org.apache.hbase + hbase + 0.90.3 + provided + + + slf4j-log4j12 + org.slf4j + + + + + + org.apache.felix + org.apache.felix.scr.annotations + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + tests + 1.5.1-SNAPSHOT + test + + + org.slf4j + slf4j-api + 1.5.10 + + + org.slf4j + slf4j-simple + 1.5.10 + test + + + junit + junit + 4.4 + test + + + findbugs + annotations + 1.0.0 + provided + + + + diff --git a/drivers/hbase/src/main/java/org/sakaiproject/nakamura/lite/storage/hbase/HBaseStorageClient.java b/drivers/hbase/src/main/java/org/sakaiproject/nakamura/lite/storage/hbase/HBaseStorageClient.java new file mode 100644 index 00000000..4015fa87 --- /dev/null +++ b/drivers/hbase/src/main/java/org/sakaiproject/nakamura/lite/storage/hbase/HBaseStorageClient.java @@ -0,0 +1,471 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.storage.hbase; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.NavigableMap; +import java.util.Set; + +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.HTableInterface; +import org.apache.hadoop.hbase.client.HTablePool; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.sakaiproject.nakamura.lite.storage.spi.Disposable; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.Disposer; +import org.sakaiproject.nakamura.lite.storage.spi.SparseRow; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientListener; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockSetContentHelper; +import org.sakaiproject.nakamura.lite.storage.spi.types.Types; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +public class 
HBaseStorageClient implements StorageClient { + + private static final Logger LOGGER = LoggerFactory.getLogger(HBaseStorageClient.class); + public static final String CONFIG_BLOCK_SIZE = "block-size"; + public static final String CONFIG_MAX_CHUNKS_PER_BLOCK = "chunks-per-block"; + + private static final int DEFAULT_BLOCK_SIZE = 1024 * 1024; + private static final int DEFAULT_MAX_CHUNKS_PER_BLOCK = 64; + private static final String INDEX_COLUMN_FAMILY = "smcindex"; + private BlockContentHelper contentHelper; + private int blockSize; + private int maxChunksPerBlockSet; + private HBaseStorageClientPool pool; + HTablePool htab; + public List> tResultRows; + private boolean active; + private List toDispose = Lists.newArrayList(); + + public HBaseStorageClient(HBaseStorageClientPool pool, Map properties, + HTablePool htab) { + this.pool = pool; + this.htab = htab; + contentHelper = new BlockSetContentHelper(this); + blockSize = StorageClientUtils.getSetting(properties.get(CONFIG_BLOCK_SIZE), + DEFAULT_BLOCK_SIZE); + maxChunksPerBlockSet = StorageClientUtils.getSetting( + properties.get(CONFIG_MAX_CHUNKS_PER_BLOCK), DEFAULT_MAX_CHUNKS_PER_BLOCK); + } + + public void insert(String keySpace, String columnFamily, String key, + Map values, boolean probablyNew) throws StorageClientException { + HTableInterface table = null; + try { + table = htab.getTable(columnFamily); + Put row = new Put(key.getBytes("UTF-8"), System.currentTimeMillis()); + // TODO We need to handle deletions correctly. + // If the deleted flag is set we should probably just delete completely. 
+ + for (Entry value : values.entrySet()) { + String q = value.getKey(); + byte[] qualifier = null; + qualifier = q.getBytes("UTF-8"); + Object v = value.getValue(); + byte qualifierValue[] = Types.toByteArray(v); + + if (v instanceof RemoveProperty) { + Delete delRow = new Delete(key.getBytes("UTF-8")); + delRow.deleteColumns(columnFamily.getBytes("UTF-8"), qualifier); + table.delete(delRow); + } else { + row.add(columnFamily.getBytes("UTF-8"), qualifier, System.currentTimeMillis(), + qualifierValue); + table.put(row); + + if ((!columnFamily.equals(INDEX_COLUMN_FAMILY)) + && shouldIndex(keySpace, columnFamily, q)) { + addIndex(keySpace, columnFamily, key, qualifier, qualifierValue); + } + } + } + } catch (UnsupportedEncodingException e1) { + LOGGER.debug(e1.getMessage()); + } catch (IOException e) { + LOGGER.debug("IOException. Stack trace:", e.getStackTrace()); + } finally { + if (htab != null) { + htab.putTable(table); + } + } + + } + + public Map get(String keySpace, String columnFamily, String key) + throws StorageClientException { + NavigableMap row; + HTableInterface table = null; + Map resultRow = new HashMap(); + try { + table = htab.getTable(columnFamily); + + Get getRow = new Get(key.getBytes("UTF-8")); + Result rowResult = table.get(getRow); + row = rowResult.getFamilyMap(columnFamily.getBytes("UTF-8")); + + for (Entry value : row.entrySet()) { + String valueKey = new String(value.getKey()); + Object valueObject = Types.toObject(value.getValue()); + + resultRow.put(valueKey, valueObject); + } + + } catch (UnsupportedEncodingException e1) { + LOGGER.debug(e1.getMessage()); + } catch (IOException e1) { + LOGGER.debug(e1.getMessage()); + } catch (Exception e) { + LOGGER.debug(e.getMessage()); + } finally { + if (htab != null) { + htab.putTable(table); + } + } + return resultRow; + } + + public void remove(String keySpace, String columnFamily, String key) + throws StorageClientException { + HTableInterface indexTable = null; + HTableInterface table = null; 
+ if (!columnFamily.equals(INDEX_COLUMN_FAMILY)) { + Map row = get(keySpace, columnFamily, key); + + try { + indexTable = htab.getTable(INDEX_COLUMN_FAMILY); + for (Entry value : row.entrySet()) { + String qualifierName = value.getKey(); + String qualifierValue = null; + qualifierValue = new String(Types.toByteArray(value.getValue())); + qualifierValue = StorageClientUtils.insecureHash(qualifierValue); + String indexKey = qualifierName + ":" + INDEX_COLUMN_FAMILY + ":" + + StorageClientUtils.insecureHash(qualifierValue); + Delete delIndexKey = new Delete(indexKey.getBytes("UTF-8")); + delIndexKey.deleteColumns(columnFamily.getBytes("UTF-8"), + qualifierName.getBytes("UTF-8")); + indexTable.delete(delIndexKey); + } + } catch (IOException e) { + LOGGER.debug("IOException. Stack trace:",e); + } + } + + try { + table = htab.getTable(columnFamily); + Delete delRow = new Delete(key.getBytes("UTF-8")); + delRow.deleteFamily(columnFamily.getBytes("UTF-8")); + table.delete(delRow); + } catch (UnsupportedEncodingException e1) { + LOGGER.debug(e1.getMessage()); + } catch (IOException e) { + LOGGER.debug("IOException. 
Stack trace:", e.getStackTrace()); + } finally { + if (htab != null) { + htab.putTable(table); + htab.putTable(indexTable); + } + } + } + + private boolean shouldIndex(String keySpace, String columnFamily, String columnName) + throws StorageClientException { + + Set indexColumns = pool.getIndexColumns(); + + if (indexColumns.contains(columnFamily + ":" + columnName)) { + LOGGER.debug("Should Index {}:{}", columnFamily, columnName); + return true; + } else { + LOGGER.debug("Should Not Index {}:{}", columnFamily, columnName); + return false; + } + + } + + private void addIndex(String keySpace, String columnFamily, String key, byte[] bname, + byte[] b) throws StorageClientException { + String indexKey = new String(bname) + ":" + columnFamily + ":" + + StorageClientUtils.insecureHash(new String(b)); + Map values = new HashMap(); + values.put(key, (Object) "Value of index yet to be decided"); + insert(keySpace, INDEX_COLUMN_FAMILY, indexKey, values, true); + } + + public boolean hasBody(Map content, String streamId) { + return contentHelper.hasBody(content, streamId); + } + + public Map streamBodyIn(String keySpace, String contentColumnFamily, + String contentId, String contentBlockId, String streamId, + Map content, InputStream in) throws StorageClientException, + AccessDeniedException, IOException { + return contentHelper.writeBody(keySpace, contentColumnFamily, contentId, + contentBlockId, streamId, blockSize, maxChunksPerBlockSet, in); + } + + public void close() { + pool.releaseClient(this); + } + + public InputStream streamBodyOut(String keySpace, String contentColumnFamily, + String contentId, String contentBlockId, String streamId, + Map content) throws StorageClientException, AccessDeniedException, + IOException { + int nBlocks = StorageClientUtils.toInt(content.get(Content.NBLOCKS_FIELD)); + return contentHelper.readBody(keySpace, contentColumnFamily, contentBlockId, + streamId, nBlocks); + } + + public DisposableIterator> find(String keySpace, + String 
authorizableColumnFamily, Map properties, DirectCacheAccess cachingManager) + throws StorageClientException { + final String fKeyspace = keySpace; + final String fAuthorizableColumnFamily = authorizableColumnFamily; + List> andTerms = new ArrayList>(); + + for (Entry e : properties.entrySet()) { + String k = e.getKey(); + Object v = e.getValue(); + + if (shouldIndex(keySpace, authorizableColumnFamily, k) || (v instanceof Map)) { + if (v != null) { + if (v instanceof Map) { + List> orTerms = new ArrayList>(); + Set orResultSet = new HashSet(); + + @SuppressWarnings("unchecked") + Set> subterms = ((Map) v).entrySet(); + + for (Iterator> subtermsIter = subterms.iterator(); subtermsIter + .hasNext();) { + Entry subterm = subtermsIter.next(); + String subk = subterm.getKey(); + Object subv = subterm.getValue(); + if (shouldIndex(keySpace, authorizableColumnFamily, subk)) { + try { + Set or = new HashSet(); + String indexKey = new String(subk.getBytes("UTF-8")) + + ":" + + authorizableColumnFamily + + ":" + + StorageClientUtils.insecureHash(new String(Types + .toByteArray(subv))); + Map tempRow = get(keySpace, INDEX_COLUMN_FAMILY, indexKey); + for (Entry tempRows : tempRow.entrySet()) { + or.add(tempRows.getKey()); + } + orTerms.add(or); + } catch (IOException e1) { + LOGGER.warn("IOException {}", e1.getMessage()); + } + } + } + + if (!orTerms.isEmpty()) + orResultSet = orTerms.get(0); + + for (int i = 0; i < orTerms.size(); i++) { + orResultSet = Sets.union(orResultSet, orTerms.get(i)); + + } + andTerms.add(orResultSet); + } else { + try { + Set and = new HashSet(); + String indexKey = new String(k.getBytes("UTF-8")) + ":" + authorizableColumnFamily + + ":" + + StorageClientUtils.insecureHash(new String(Types.toByteArray(v))); + Map tempRow = get(keySpace, INDEX_COLUMN_FAMILY, indexKey); + for (Entry tempRows : tempRow.entrySet()) { + and.add(tempRows.getKey()); + } + andTerms.add(and); + } catch (IOException e1) { + LOGGER.warn("IOException {}", e1.getMessage()); + } + 
} + } + } + } + + Set andResultSet = new HashSet(); + + if (!andTerms.isEmpty()) + andResultSet = andTerms.get(0); + + for (int i = 0; i < andTerms.size(); i++) { + andResultSet = Sets.intersection(andResultSet, andTerms.get(i)); + } + + List> resultRows = new ArrayList>(); + + Iterator iterator = andResultSet.iterator(); + + while (iterator.hasNext()) { + Map row = get(keySpace, authorizableColumnFamily, iterator.next()); + resultRows.add(row); + } + + tResultRows = resultRows; + final Iterator fIterator = andResultSet.iterator(); + + if (tResultRows.isEmpty()) { + return new DisposableIterator>() { + private Disposer disposer; + public boolean hasNext() { + return false; + } + + public Map next() { + return null; + } + + public void remove() { + } + + public void close() { + if (disposer != null) { + disposer.unregisterDisposable(this); + } + } + + public void setDisposer(Disposer disposer) { + this.disposer = disposer; + + } + }; + } + return registerDisposable(new PreemptiveIterator>() { + + private Map nextValue = Maps.newHashMap(); + private boolean open = true; + + @Override + protected Map internalNext() { + return nextValue; + } + + @Override + protected boolean internalHasNext() { + if (fIterator.hasNext()) { + try { + String id = fIterator.next(); + nextValue = get(fKeyspace, fAuthorizableColumnFamily, id); + LOGGER.debug("Got Row ID {} {} ", id, nextValue); + return true; + } catch (StorageClientException e) { + + } + } + close(); + nextValue = null; + LOGGER.debug("End of Set "); + return false; + } + + @Override + public void close() { + if (open) { + open = false; + } + + } + }); + } + + public DisposableIterator> listChildren(String keySpace, + String columnFamily, String key, DirectCacheAccess cachingManager) throws StorageClientException { + throw new UnsupportedOperationException(); + } + + public void passivate() { + } + + public void activate() { + } + + public void validate() { + } + + public void destroy() { + try { + ((HTableInterface) 
htab).close(); + } catch (IOException e) { + LOGGER.debug(e.getMessage()); + } + } + + private T registerDisposable(T disposable) { + toDispose.add(disposable); + return disposable; + } + + public void shutdownConnection() { + if (active) { + disposeDisposables(); + active = false; + } + } + + private void disposeDisposables() { + for (Disposable d : toDispose) { + d.close(); + } + } + +public DisposableIterator listAll(String keySpace, String columnFamily) { + // TODO Auto-generated method stub + return null; +} + +public long allCount(String keySpace, String columnFamily) { + // TODO Auto-generated method stub + return 0; +} + +public void setStorageClientListener(StorageClientListener storageClientListener) { + // TODO Auto-generated method stub + +} + +} \ No newline at end of file diff --git a/drivers/hbase/src/main/java/org/sakaiproject/nakamura/lite/storage/hbase/HBaseStorageClientPool.java b/drivers/hbase/src/main/java/org/sakaiproject/nakamura/lite/storage/hbase/HBaseStorageClientPool.java new file mode 100644 index 00000000..714702cb --- /dev/null +++ b/drivers/hbase/src/main/java/org/sakaiproject/nakamura/lite/storage/hbase/HBaseStorageClientPool.java @@ -0,0 +1,146 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
package org.sakaiproject.nakamura.lite.storage.hbase;

import java.util.Map;

import org.apache.commons.pool.BasePoolableObjectFactory;
import org.apache.commons.pool.PoolableObjectFactory;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTablePool;
import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.StorageCacheManager;
import org.sakaiproject.nakamura.lite.storage.spi.AbstractClientConnectionPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Connection pool for {@link HBaseStorageClient} instances. Each pooled client
 * shares a single {@link HTablePool} configured from the ZooKeeper quorum
 * address held in {@link #connection}.
 */
public class HBaseStorageClientPool extends AbstractClientConnectionPool {

    private static final Logger LOGGER = LoggerFactory
            .getLogger(HBaseStorageClientPool.class);

    /** Optional shared cache manager, injected dynamically when available. */
    @Reference(cardinality = ReferenceCardinality.OPTIONAL_UNARY, policy = ReferencePolicy.DYNAMIC)
    private StorageCacheManager storageManagerCache;

    /** ZooKeeper quorum address as host:port. */
    private String connection = "127.0.0.1:2181";
    private Map<String, Object> properties;

    /** Commons-pool factory that creates, validates and destroys pooled clients. */
    public static class ClientConnectionPoolFactory extends BasePoolableObjectFactory {
        private Map<String, Object> properties;
        private HBaseStorageClientPool pool;
        private HTablePool htablePool = null;

        public ClientConnectionPoolFactory(HBaseStorageClientPool pool, String connection,
                Map<String, Object> properties) {
            this.properties = properties;
            this.pool = pool;

            String[] hostAndPort = connection.split(":");
            String host = hostAndPort[0];
            String port = hostAndPort[1];

            Configuration config = HBaseConfiguration.create();
            // hbase.zookeeper.quorum takes hostnames only; the port is carried
            // by the separate clientPort property. (The original appended
            // ":port" to the quorum entry as well, which ZooKeeper does not
            // parse as part of the hostname.)
            config.set("hbase.zookeeper.quorum", host);
            config.set("hbase.zookeeper.property.clientPort", port);
            htablePool = new HTablePool(config, 10);
        }

        @Override
        public Object makeObject() throws Exception {
            return new HBaseStorageClient(pool, properties, htablePool);
        }

        @Override
        public void passivateObject(Object obj) throws Exception {
            HBaseStorageClient clientConnection = (HBaseStorageClient) obj;
            clientConnection.passivate();
            super.passivateObject(obj);
        }

        @Override
        public void activateObject(Object obj) throws Exception {
            HBaseStorageClient clientConnection = (HBaseStorageClient) obj;
            clientConnection.activate();
            super.activateObject(obj);
        }

        @Override
        public void destroyObject(Object obj) throws Exception {
            HBaseStorageClient clientConnection = (HBaseStorageClient) obj;
            clientConnection.destroy();
        }

        @Override
        public boolean validateObject(Object obj) {
            HBaseStorageClient clientConnection = (HBaseStorageClient) obj;
            try {
                clientConnection.validate();
            } catch (Exception e) {
                LOGGER.error("Failed to validate connection " + e.getMessage(), e);
                return false;
            }
            return super.validateObject(obj);
        }

    }

    @Override
    protected PoolableObjectFactory getConnectionPoolFactory() {
        return new ClientConnectionPoolFactory(this, connection, properties);
    }

    /**
     * Activates the pool and checks out one client as a connectivity smoke
     * test, returning it to the pool immediately.
     */
    public void activate(Map<String, Object> properties) throws ClassNotFoundException {
        this.properties = properties;
        super.activate(properties);
        // this should come from the memory service ultimately.

        HBaseStorageClient client = null;
        try {
            client = (HBaseStorageClient) getClient();
            if (client == null) {
                LOGGER.warn("No connection");
            }
        } catch (ClientPoolException e) {
            LOGGER.error("Failed to check Schema", e);
        } finally {
            if (client != null) {
                client.close();
            }
        }
    }

    @Deactivate
    public void deactivate(Map<String, Object> properties) {
        super.deactivate(properties);
    }

    /** @return the injected cache manager, or null when none is bound. */
    public StorageCacheManager getStorageCacheManager() {
        // Returning the (possibly null) reference directly is equivalent to the
        // original null-check-then-return-null.
        return storageManagerCache;
    }
}
+ */ +package org.sakaiproject.nakamura.lite.hbase; + +import com.google.common.collect.ImmutableMap; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.accesscontrol.AbstractAccessControlManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.hbase.HBaseStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +; + +public class AccessControlManagerImplTest extends AbstractAccessControlManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + if ( true ) { + return null; + } + HBaseStorageClientPool cp = new HBaseStorageClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, Configuration.class.getName(), + configuration)); + return cp; + } + +} diff --git a/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/AuthorizableManagerImplTest.java b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/AuthorizableManagerImplTest.java new file mode 100644 index 00000000..073305fb --- /dev/null +++ b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/AuthorizableManagerImplTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.hbase; + +import com.google.common.collect.ImmutableMap; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.authorizable.AbstractAuthorizableManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.hbase.HBaseStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +public class AuthorizableManagerImplTest extends AbstractAuthorizableManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + if ( true ) { + return null; + } + HBaseStorageClientPool cp = new HBaseStorageClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, Configuration.class.getName(), + configuration)); + return cp; + } + +} diff --git a/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/ContentManagerFinderImplTest.java b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/ContentManagerFinderImplTest.java new file mode 100644 index 00000000..57fd1174 --- /dev/null +++ b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/ContentManagerFinderImplTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.hbase; + +import com.google.common.collect.ImmutableMap; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerFinderTest; +import org.sakaiproject.nakamura.lite.storage.hbase.HBaseStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +public class ContentManagerFinderImplTest extends AbstractContentManagerFinderTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + if ( true ) { + return null; + } + HBaseStorageClientPool cp = new HBaseStorageClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, Configuration.class.getName(), + configuration)); + return cp; + } + +} diff --git a/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/ContentManagerImplTest.java b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/ContentManagerImplTest.java new file mode 100644 index 00000000..fcc7a796 --- /dev/null +++ b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/ContentManagerImplTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor 
license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.hbase; + +import com.google.common.collect.ImmutableMap; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerTest; +import org.sakaiproject.nakamura.lite.storage.hbase.HBaseStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +public class ContentManagerImplTest extends AbstractContentManagerTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + if ( true ) { + return null; + } + HBaseStorageClientPool cp = new HBaseStorageClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, Configuration.class.getName(), + configuration)); + return cp; + } + +} diff --git a/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/LockManagerImplTest.java b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/LockManagerImplTest.java new file mode 100644 index 00000000..72fdb525 --- /dev/null +++ 
b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/hbase/LockManagerImplTest.java @@ -0,0 +1,26 @@ +package org.sakaiproject.nakamura.lite.hbase; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.lock.AbstractLockManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.hbase.HBaseStorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.content.BlockContentHelper; + +import com.google.common.collect.ImmutableMap; + +public class LockManagerImplTest extends AbstractLockManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + if ( true ) { + return null; + } + HBaseStorageClientPool cp = new HBaseStorageClientPool(); + cp.activate(ImmutableMap.of("test", (Object) "test", + BlockContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK, 9, Configuration.class.getName(), + configuration)); + return cp; + } + +} diff --git a/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/soak/hbase/CreateUsersAndGroupsSoak.java b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/soak/hbase/CreateUsersAndGroupsSoak.java new file mode 100644 index 00000000..8a2b8992 --- /dev/null +++ b/drivers/hbase/src/test/java/org/sakaiproject/nakamura/lite/soak/hbase/CreateUsersAndGroupsSoak.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
package org.sakaiproject.nakamura.lite.soak.hbase;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Configuration;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.lite.ConfigurationImpl;
import org.sakaiproject.nakamura.lite.soak.AbstractSoakController;
import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsClient;
import org.sakaiproject.nakamura.lite.storage.hbase.HBaseStorageClientPool;
import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool;

import java.io.IOException;
import java.util.Map;

/**
 * Soak test that creates users and groups against an HBase-backed store.
 * Usage: {@code CreateUsersAndGroupsSoak [nthreads] [totalUsers]}.
 */
public class CreateUsersAndGroupsSoak extends AbstractSoakController {

    private int totalUsers;
    private StorageClientPool connectionPool;
    private Configuration configuration;

    public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool,
            Configuration configuration) {
        super(totalUsers);
        this.connectionPool = connectionPool;
        this.configuration = configuration;
        this.totalUsers = totalUsers;
    }

    /** Each worker thread receives an equal share of the total user count. */
    protected Runnable getRunnable(int nthreads) throws ClientPoolException,
            StorageClientException, AccessDeniedException {
        int usersPerThread = totalUsers / nthreads;
        return new CreateUsersAndGroupsClient(usersPerThread, connectionPool, configuration);
    }

    public static void main(String[] argv) throws ClientPoolException, StorageClientException,
            AccessDeniedException, ClassNotFoundException, IOException {

        int nthreads = 10;
        int totalUsers = 1000;

        if (argv.length > 0) {
            nthreads = StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads);
        }
        if (argv.length > 1) {
            totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers);
        }

        ConfigurationImpl configuration = new ConfigurationImpl();
        Map<String, Object> configProperties = Maps.newHashMap();
        configProperties.put("keyspace", "n");
        configProperties.put("acl-column-family", "ac");
        configProperties.put("authorizable-column-family", "au");
        configProperties.put("content-column-family", "cn");
        configuration.activate(configProperties);

        new CreateUsersAndGroupsSoak(totalUsers, getConnectionPool(configuration), configuration)
                .launchSoak(nthreads);
    }

    /** Builds and activates an HBase-backed pool for the soak run. */
    protected static StorageClientPool getConnectionPool(Configuration configuration)
            throws ClassNotFoundException {
        HBaseStorageClientPool clientPool = new HBaseStorageClientPool();
        clientPool.activate(ImmutableMap.of("test", (Object) "test",
                Configuration.class.getName(), configuration));
        return clientPool;
    }

}
package org.sakaiproject.nakamura.lite.soak.hbase;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Configuration;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.lite.ConfigurationImpl;
import org.sakaiproject.nakamura.lite.soak.AbstractSoakController;
import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsWithMembersClient;
import org.sakaiproject.nakamura.lite.storage.hbase.HBaseStorageClientPool;
import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool;

import java.io.IOException;
import java.util.Map;

/**
 * Soak test that creates users plus groups with members against an
 * HBase-backed store.
 * Usage: {@code CreateUsersAndGroupsWithMembersSoak [nthreads] [totalUsers] [totalGroups]}.
 */
public class CreateUsersAndGroupsWithMembersSoak extends AbstractSoakController {

    private int totalUsers;
    private StorageClientPool connectionPool;
    private int totalGroups;
    private Configuration configuration;

    public CreateUsersAndGroupsWithMembersSoak(int totalUsers, int totalGroups,
            StorageClientPool connectionPool, Configuration configuration) {
        // Each group costs roughly five user-equivalent operations in the
        // overall work estimate passed to the controller.
        super(totalUsers + (totalGroups * 5));
        this.connectionPool = connectionPool;
        this.configuration = configuration;
        this.totalUsers = totalUsers;
        this.totalGroups = totalGroups;
    }

    /** Each worker thread receives an equal share of the user and group counts. */
    protected Runnable getRunnable(int nthreads) throws ClientPoolException,
            StorageClientException, AccessDeniedException {
        int usersPerThread = totalUsers / nthreads;
        int groupsPerThread = totalGroups / nthreads;
        return new CreateUsersAndGroupsWithMembersClient(usersPerThread, groupsPerThread,
                connectionPool, configuration);
    }

    public static void main(String[] argv) throws ClientPoolException, StorageClientException,
            AccessDeniedException, ClassNotFoundException, IOException {

        int totalUsers = 1000;
        int totalGroups = 100;
        int nthreads = 10;

        if (argv.length > 0) {
            nthreads = StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads);
        }
        if (argv.length > 1) {
            totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers);
        }
        if (argv.length > 2) {
            // BUG FIX: the fallback was totalUsers, so omitting or blanking the
            // third argument silently ran with 1000 groups instead of 100.
            totalGroups = StorageClientUtils.getSetting(Integer.valueOf(argv[2]), totalGroups);
        }

        ConfigurationImpl configuration = new ConfigurationImpl();
        Map<String, Object> properties = Maps.newHashMap();
        properties.put("keyspace", "n");
        properties.put("acl-column-family", "ac");
        properties.put("authorizable-column-family", "au");
        properties.put("content-column-family", "cn");
        configuration.activate(properties);

        CreateUsersAndGroupsWithMembersSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsWithMembersSoak(
                totalUsers, totalGroups, getConnectionPool(configuration), configuration);
        createUsersAndGroupsSoak.launchSoak(nthreads);
    }

    /** Builds and activates an HBase-backed pool for the soak run. */
    protected static StorageClientPool getConnectionPool(Configuration configuration)
            throws ClassNotFoundException {
        HBaseStorageClientPool hp = new HBaseStorageClientPool();
        hp.activate(ImmutableMap.of("test", (Object) "test",
                Configuration.class.getName(), configuration));
        return hp;
    }

}
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.sakaiproject.nakamura.lite.soak.hbase;

import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;

import java.io.IOException;

/**
 * Convenience driver that runs every HBase soak test in sequence,
 * forwarding the same command-line arguments to each one.
 */
public class SoakAll {

    public static void main(String[] argv) throws ClientPoolException, StorageClientException,
            AccessDeniedException, ClassNotFoundException, IOException {
        CreateUsersAndGroupsSoak.main(argv);
        CreateUsersAndGroupsWithMembersSoak.main(argv);
    }
}
It provides a component (JDBCStorageClientPool) that allows you to configure common JDBC properties (e.g., driver class, jdbc url, username and password). This driver assumes that you will also make available the appropriate JDBC library on the classpath, as it does not embed any JDBC drivers itself. diff --git a/drivers/jdbc/pom.xml b/drivers/jdbc/pom.xml new file mode 100644 index 00000000..96bbe4b4 --- /dev/null +++ b/drivers/jdbc/pom.xml @@ -0,0 +1,139 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + org.sakaiproject.nakamura.jdbc-driver + bundle + 0.1-SNAPSHOT + Sakai Nakamura :: Generic JDBC SPI Implementation + Storage SPI implementation using JDBC + + scm:git:git://github.com/sakaiproject/sparsemapcontent.git + scm:git:git@github.com:sakaiproject/sparsemapcontent.git + http://github.com/sakaiproject/sparsemapcontent/ + + + UTF-8 + + + + + org.apache.felix + maven-bundle-plugin + true + + + driver + !* + !* + + org.sakaiproject.nakamura.core + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.5 + + + **/Test*.java + **/*Test.java + **/*TestCase.java + + + + **/oracle/** + **/mysql/** + **/postgresql/** + + + + + + + + + + org.apache.felix + org.apache.felix.scr.annotations + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + + + findbugs + annotations + 1.0.0 + provided + + + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + tests + test + + + org.slf4j + slf4j-simple + 1.5.10 + test + + + junit + junit + 4.4 + test + + + + + mysql + mysql-connector-java + 5.1.17 + test + + + org.apache.derby + derby + 10.6.2.1 + test + + + postgresql + postgresql + 9.0-801.jdbc4 + test + + + + + + diff --git a/drivers/jdbc/scripts/mysql/drop.sh b/drivers/jdbc/scripts/mysql/drop.sh new file mode 100755 index 00000000..26fdb800 --- /dev/null +++ b/drivers/jdbc/scripts/mysql/drop.sh @@ -0,0 +1,11 @@ +#!/bin/sh + +mysql -f -v -u root << EOSQL +drop database 
nakamura; +create database nakamura default character set utf8; +grant all on sakaiuser.* to sakaiuser@'127.0.0.1' identified by 'ironchef'; +grant all on sakaiuser.* to sakaiuser@'localhost' identified by 'ironchef'; +exit +EOSQL + + diff --git a/drivers/jdbc/scripts/oracle/drop.sql b/drivers/jdbc/scripts/oracle/drop.sql new file mode 100644 index 00000000..5fba2014 --- /dev/null +++ b/drivers/jdbc/scripts/oracle/drop.sql @@ -0,0 +1,22 @@ + DROP TABLE css cascade constraints; + DROP TABLE au_css cascade constraints; + DROP TABLE cn_css cascade constraints; + DROP TABLE ac_css cascade constraints; + DROP TABLE lk_css cascade constraints; + DROP TABLE css_w cascade constraints; + DROP TABLE ac_css_w cascade constraints; + DROP TABLE au_css_w cascade constraints; + DROP TABLE cn_css_w cascade constraints; + DROP TABLE lk_css_w cascade constraints; + DROP TABLE css_wr cascade constraints; + DROP TABLE css_b cascade constraints; + DROP TABLE cn_css_b cascade constraints; + DROP TABLE au_css_b cascade constraints; + DROP TABLE ac_css_b cascade constraints; + DROP TABLE lk_css_b cascade constraints; + DROP SEQUENCE SEQ_CSS_ID; + DROP SEQUENCE SEQ_AC_CSS_ID; + DROP SEQUENCE SEQ_AU_CSS_ID; + DROP SEQUENCE SEQ_CN_CSS_ID; + DROP SEQUENCE SEQ_LK_CSS_ID; + \ No newline at end of file diff --git a/drivers/jdbc/scripts/postgresql/drop.sh b/drivers/jdbc/scripts/postgresql/drop.sh new file mode 100755 index 00000000..de163b9e --- /dev/null +++ b/drivers/jdbc/scripts/postgresql/drop.sh @@ -0,0 +1,25 @@ +#!/bin/sh -e + + +/Library/PostgreSQL/9.0/bin/psql -h localhost -U nakamura nak << EOF +DROP TABLE IF EXISTS css CASCADE; +DROP TABLE IF EXISTS ac_css CASCADE; +DROP TABLE IF EXISTS au_css CASCADE; +DROP TABLE IF EXISTS cn_css CASCADE; +DROP TABLE IF EXISTS lk_css CASCADE; +DROP TABLE IF EXISTS css_b CASCADE; +DROP TABLE IF EXISTS au_css_b CASCADE; +DROP TABLE IF EXISTS ac_css_b CASCADE; +DROP TABLE IF EXISTS cn_css_b CASCADE; +DROP TABLE IF EXISTS lk_css_b CASCADE; +DROP TABLE IF 
EXISTS css_w CASCADE; +DROP TABLE IF EXISTS ac_css_w CASCADE; +DROP TABLE IF EXISTS au_css_w CASCADE; +DROP TABLE IF EXISTS cn_css_w CASCADE; +DROP TABLE IF EXISTS lk_css_w CASCADE; +DROP TABLE IF EXISTS css_wr CASCADE; +EOF + + + + diff --git a/drivers/jdbc/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClientPool.java b/drivers/jdbc/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClientPool.java new file mode 100644 index 00000000..e040cd5e --- /dev/null +++ b/drivers/jdbc/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClientPool.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.jdbc; + +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +/** + * A concrete stub for a generic JDBC storage client pool component. The super on its + * own has all the components necessary to be a generic JDBC driver on its own, therefore we + * just need to activate the OSGi component properties. 
+ */ +@Component(immediate = true, metatype = true, inherit = true) +@Service(value = StorageClientPool.class) +public class JDBCStorageClientPool extends BaseJDBCStorageClientPool { + +} diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.5.ddl b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.5.ddl new file mode 100644 index 00000000..d3a3b0e4 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.5.ddl @@ -0,0 +1,150 @@ +# If using mySQL 5.1 you can use innodb_autoinc_lock_mode=1 and have an an autoinc PK. +# Having and autoink PK in 5.0 and earlier will lead to table serialization as the key generation requires a full table lock which is why we have no +# PK in these tables +# The access mechanism must be update then insert to allow no PK and no Unique key. +# Please read http://harrison-fisk.blogspot.com/2009/02/my-favorite-new-feature-of-mysql-51.html for info. 
+# +# Please read for proper UTF-8 encoding support: +# http://rentzsch.tumblr.com/post/9133498042/howto-use-utf-8-throughout-your-web-stack + +#### DROP TABLE IF EXISTS `css`; + +CREATE TABLE `css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` USING BTREE (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +##### DROP TABLE IF EXISTS `au_css`; + +CREATE TABLE `au_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` USING BTREE (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +###### DROP TABLE IF EXISTS `cn_css`; + +CREATE TABLE `cn_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` USING BTREE (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +###### DROP TABLE IF EXISTS `lk_css`; + +CREATE TABLE `ac_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` USING BTREE (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +###### DROP TABLE IF EXISTS `ac_css`; + +CREATE TABLE `lk_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` USING BTREE (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +CREATE TABLE css_w ( + `rid` varchar(32) NOT NULL, + primary 
key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE ac_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE au_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE cn_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE lk_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE css_wr ( + `id` INT NOT NULL AUTO_INCREMENT, + `cf` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `cname` varchar(64) NOT NULL, + primary key(`id`), + unique key css_r_cid (`cf`,`cid`), + unique key css_r_cnam (`cf`,`cname`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + + +# Body Store. In some cases we want to store the bodies of the objects in a binary serialized lump +# This allows us to load and save the sparse map without using multiple records in the above tables and hence is more compact +# And uses less bandwidth to the DB. 
+# Where this is done, we still index certain fields as defined in index_cols + +CREATE TABLE `css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `cn_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `au_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `ac_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `lk_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.ddl b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.ddl new file mode 100644 index 00000000..d4355ff0 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.ddl @@ -0,0 +1,148 @@ +# If using mySQL 5.1 you can use innodb_autoinc_lock_mode=1 and have an an autoinc PK. +# Having and autoink PK in 5.0 and earlier will lead to table serialization as the key generation requires a full table lock which is why we have no +# PK in these tables +# The access mechanism must be update then insert to allow no PK and no Unique key. 
+# Please read http://harrison-fisk.blogspot.com/2009/02/my-favorite-new-feature-of-mysql-51.html for info. +# +# Please read for proper UTF-8 encoding support: +# http://rentzsch.tumblr.com/post/9133498042/howto-use-utf-8-throughout-your-web-stack + +#### DROP TABLE IF EXISTS `css`; + +CREATE TABLE `css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +##### DROP TABLE IF EXISTS `au_css`; + +CREATE TABLE `au_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +###### DROP TABLE IF EXISTS `cn_css`; + +CREATE TABLE `cn_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +###### DROP TABLE IF EXISTS `ac_css`; + +CREATE TABLE `ac_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +###### DROP TABLE IF EXISTS `lk_css`; + +CREATE TABLE `lk_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +CREATE TABLE css_w ( 
+ `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE ac_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE au_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE cn_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE lk_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE css_wr ( + `id` INT NOT NULL AUTO_INCREMENT, + `cf` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `cname` varchar(64) NOT NULL, + primary key(`id`), + unique key css_r_cid (`cf`,`cid`), + unique key css_r_cnam (`cf`,`cname`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +# Body Store. In some cases we want to store the bodies of the objects in a binary serialized lump +# This allows us to load and save the sparse map without using multiple records in the above tables and hence is more compact +# And uses less bandwidth to the DB. 
+# Where this is done, we still index certain fields as defined in index_cols + +CREATE TABLE `css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `cn_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `au_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `ac_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE `lk_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + PRIMARY KEY (`rid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.ddl b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.ddl new file mode 100644 index 00000000..473b9cdd --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.ddl @@ -0,0 +1,153 @@ +# If using mySQL 5.1 you can use innodb_autoinc_lock_mode=1 and have an an autoinc PK. +# Having and autoink PK in 5.0 and earlier will lead to table serialization as the key generation requires a full table lock which is why we have no +# PK in these tables +# The access mechanism must be update then insert to allow no PK and no Unique key. +# Please read http://harrison-fisk.blogspot.com/2009/02/my-favorite-new-feature-of-mysql-51.html for info. 
+# +# Please read for proper UTF-8 encoding support: +# http://rentzsch.tumblr.com/post/9133498042/howto-use-utf-8-throughout-your-web-stack + +######## DROP TABLE IF EXISTS `css`; + +# Central store + +CREATE TABLE `css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +######## DROP TABLE IF EXISTS `au_css`; + +# Store just for Authorizables +CREATE TABLE `au_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +####### DROP TABLE IF EXISTS `cn_css`; + +# Store just for Content +CREATE TABLE `cn_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +####### DROP TABLE IF EXISTS `ac_css`; + +# Store just for Access Control +CREATE TABLE `ac_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +####### DROP TABLE IF EXISTS `ac_css`; + +# Store just for Access Control +CREATE TABLE `lk_css` ( + `id` INT NOT NULL AUTO_INCREMENT, + `rid` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `v` varchar(780) NOT NULL, + PRIMARY KEY (`id`), + KEY `rowkey` (`rid`,`cid`), + KEY `cid_locate_i` (`v`(255),`cid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 
COLLATE=utf8_unicode_ci; + +CREATE TABLE css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE ac_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE au_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE cn_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE lk_css_w ( + `rid` varchar(32) NOT NULL, + primary key(`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +CREATE TABLE css_wr ( + `id` INT NOT NULL AUTO_INCREMENT, + `cf` varchar(32) NOT NULL, + `cid` varchar(64) NOT NULL, + `cname` varchar(64) NOT NULL, + primary key(`id`), + unique key css_r_cid (`cf`,`cid`), + unique key css_r_cnam (`cf`,`cname`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + +# Body Store. In some cases we want to store the bodies of the objects in a binary serialized lump +# This allows us to load and save the sparse map without using multiple records in the above tables and hence is more compact +# And uses less bandwidth to the DB. 
+# Where this is done, we still index certain fields as defined in index_cols + +CREATE TABLE `css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + primary key (`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `cn_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + primary key (`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `au_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + primary key (`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `ac_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + primary key (`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + +# Central Store for Object bodies, serialized content maps rather than columns +CREATE TABLE `lk_css_b` ( + `rid` varchar(32) NOT NULL, + `b` mediumblob, + primary key (`rid`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; + + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.sql b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.sql new file mode 100644 index 00000000..805ffda7 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.sql @@ -0,0 +1,166 @@ +# SQL statements of the form key[.keyspace.columnfamily.[rowID0-2]] +# the based key should always be present +# the keyspace.columnfamily selectors are used to shard the column family (optional) +# the rowID0-2 is to shard on rowID, you can selectively shard hot rowID areas. 
+# If sharding ensure that any exiting data is migrated (using SQL DML) and that the finder statements are adjusted to incorporate the shards (warning, might be hard) +# Indexer statements +delete-string-row = delete from css where rid = ? +delete-string-row.n.ac = delete from ac_css where rid = ? +delete-string-row.n.au = delete from au_css where rid = ? +delete-string-row.n.cn = delete from cn_css where rid = ? +delete-string-row.n.lk = delete from lk_css where rid = ? +select-string-row = select cid, v from css where rid = ? +select-string-row.n.ac = select cid, v from ac_css where rid = ? +select-string-row.n.au = select cid, v from au_css where rid = ? +select-string-row.n.cn = select cid, v from cn_css where rid = ? +select-string-row.n.lk = select cid, v from lk_css where rid = ? +insert-string-column = insert into css ( v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.ac = insert into ac_css ( v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.au = insert into au_css ( v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.cn = insert into cn_css ( v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.lk = insert into lk_css ( v, rid, cid) values ( ?, ?, ? ) +update-string-column = update css set v = ? where rid = ? and cid = ? +update-string-column.n.ac = update ac_css set v = ? where rid = ? and cid = ? +update-string-column.n.au = update au_css set v = ? where rid = ? and cid = ? +update-string-column.n.cn = update cn_css set v = ? where rid = ? and cid = ? +update-string-column.n.lk = update lk_css set v = ? where rid = ? and cid = ? +remove-string-column = delete from css where rid = ? and cid = ? +remove-string-column.n.ac = delete from ac_css where rid = ? and cid = ? +remove-string-column.n.au = delete from au_css where rid = ? and cid = ? +remove-string-column.n.cn = delete from cn_css where rid = ? and cid = ? +remove-string-column.n.lk = delete from lk_css where rid = ? and cid = ? 
# Example of a sharded query, rowIDs starting with x will use this
### remove-string-column.n.cn._X = delete from cn_css_X where rid = ? and cid = ?

# base statement with paging ; table join ; where clause ; where clause for sort field (if needed) ; order by clause
find = select a.rid, a.cid, a.v from css a {0} where {1} 1 = 1 limit {2,number,#} offset {3,number,#};, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1}
# FIX: the join segment of the n.ac, n.cn and n.lk finders previously joined
# against au_css (copy-paste from find.n.au); each must join its own column
# family table, matching the block-find.* and countestimate.* statements below.
find.n.ac = select a.rid, a.cid, a.v from ac_css a {0} where {1} 1 = 1 limit {2,number,#} offset {3,number,#};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1}
find.n.au = select a.rid, a.cid, a.v from au_css a {0} where {1} 1 = 1 limit {2,number,#} offset {3,number,#};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1}
find.n.cn = select a.rid, a.cid, a.v from cn_css a {0} where {1} 1 = 1 limit {2,number,#} offset {3,number,#};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1}
find.n.lk = select a.rid, a.cid, a.v from lk_css a {0} where {1} 1 = 1 limit {2,number,#} offset {3,number,#};, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1}


# Block (serialized body) store statements, per column family.
block-select-row = select b from css_b where rid = ?
block-delete-row = delete from css_b where rid = ?
block-insert-row = insert into css_b (rid,b) values (?, ?)
block-update-row = update css_b set b = ? where rid = ?
list-all = select rid, b from css_b
list-all-count = select count(*) from css_b

block-select-row.n.ac = select b from ac_css_b where rid = ?
block-delete-row.n.ac = delete from ac_css_b where rid = ?
block-insert-row.n.ac = insert into ac_css_b (rid,b) values (?, ?)
block-update-row.n.ac = update ac_css_b set b = ? where rid = ?
+list-all.n.ac = select rid, b from ac_css_b +list-all-count.n.ac = select count(*) from ac_css_b + +block-select-row.n.cn = select b from cn_css_b where rid = ? +block-delete-row.n.cn = delete from cn_css_b where rid = ? +block-insert-row.n.cn = insert into cn_css_b (rid,b) values (?, ?) +block-update-row.n.cn = update cn_css_b set b = ? where rid = ? +list-all.n.cn = select rid, b from cn_css_b +list-all-count.n.cn = select count(*) from cn_css_b + +block-select-row.n.au = select b from au_css_b where rid = ? +block-delete-row.n.au = delete from au_css_b where rid = ? +block-insert-row.n.au = insert into au_css_b (rid,b) values (?, ?) +block-update-row.n.au = update au_css_b set b = ? where rid = ? +list-all.n.au = select rid, b from au_css_b +list-all-count.n.au = select count(*) from au_css_b + +block-select-row.n.lk = select b from lk_css_b where rid = ? +block-delete-row.n.lk = delete from lk_css_b where rid = ? +block-insert-row.n.lk = insert into lk_css_b (rid,b) values (?, ?) +block-update-row.n.lk = update lk_css_b set b = ? where rid = ? +list-all.n.lk = select rid, b from lk_css_b +list-all-count.n.lk = select count(*) from lk_css_b + +# +# These are finder statements +# base statement with paging ; table join ; where clause ; where clause for sort field (if needed) ; order by clause +block-find = select distinct a.rid from css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +block-find.n.au = select distinct a.rid from au_css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +block-find.n.cn = select distinct a.rid from cn_css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? 
and {0}.rid = a.rid ; order by {0}.v {1} +block-find.n.ac = select distinct a.rid from ac_css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +block-find.n.lk = select distinct a.rid from lk_css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} + +listchildren = select distinct a.rid from css a {0} where {1} 1 = 1 {2} ;, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +listchildren.n.au = select distinct a.rid from au_css a {0} where {1} 1 = 1 {2} ;, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +listchildren.n.cn = select distinct a.rid from cn_css a {0} where {1} 1 = 1 {2} ;, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +listchildren.n.ac = select distinct a.rid from ac_css a {0} where {1} 1 = 1 {2} ;, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +listchildren.n.lk = select distinct a.rid from lk_css a {0} where {1} 1 = 1 {2} ;, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} + +# This custom finder statement outputs 1 row which is the count of number of rows. +countestimate = select count(*) from (select distinct a.rid from css a {0} where {1} 1 = 1 {2}) as tocount ;, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +countestimate.n.au = select count(*) from (select distinct a.rid from au_css a {0} where {1} 1 = 1 {2}) as tocount ;, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? 
and {0}.rid = a.rid ; order by {0}.v {1} +countestimate.n.cn = select count(*) from (select distinct a.rid from cn_css a {0} where {1} 1 = 1 {2}) as tocount ;, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +countestimate.n.ac = select count(*) from (select distinct a.rid from ac_css a {0} where {1} 1 = 1 {2}) as tocount ;, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +countestimate.n.lk = select count(*) from (select distinct a.rid from lk_css a {0} where {1} 1 = 1 {2}) as tocount ;, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} + +# statement to validate the connection +validate = select 1 + +# What type of rowID has should be used. Must be non colliding (reasonable probability), cant be changed once set without data migration. +# SHA-1 has a 1:10E14 probability of collision, so IMVHO is Ok here. Do not use MD5, it will collide. +rowid-hash = SHA1 + +# statement to check that the schema exists +check-schema = select count(*) from css + +# Use batch Inserts means that update operations will be performed as batches rather than single SQL statements. This only really effects the update of +# Index tables and not the content store but it will reduce the number of SQL operations where more than one field is indexed per content item. +use-batch-inserts = 1 + +# Queries that take longer than these times to execute will be logged with warn and error respectively. +# Logging is performed against org.sakaiproject.nakamura.lite.storage.spi.jdbc.JDBCStorageClient.SlowQueryLogger +slow-query-time = 50 +very-slow-query-time = 100 + +index-column-name-select = select cf, cid, cname from css_wr +index-column-name-insert = insert into css_wr ( cf, cid, cname ) values ( ? , ? , ? 
) +alter-widestring-table = ALTER TABLE {0}_css_w ADD {1} varchar(780) +index-widestring-table = CREATE INDEX {0}_css_w_{1} ON {0}_css_w ({1}) + +exists-widestring-row = select rid from css_w where rid = ? +exists-widestring-row.n.cn = select rid from cn_css_w where rid = ? +exists-widestring-row.n.ac = select rid from ac_css_w where rid = ? +exists-widestring-row.n.au = select rid from au_css_w where rid = ? +exists-widestring-row.n.lk = select rid from lk_css_w where rid = ? + +delete-widestring-row = delete from css_w where rid = ? +delete-widestring-row.n.cn = delete from cn_css_w where rid = ? +delete-widestring-row.n.ac = delete from ac_css_w where rid = ? +delete-widestring-row.n.au = delete from au_css_w where rid = ? +delete-widestring-row.n.lk = delete from lk_css_w where rid = ? + +update-widestring-row = update css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.cn = update cn_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.ac = update ac_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.au = update au_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.lk = update lk_css_w set {0} where rid = ?; {0} = ? + + +insert-widestring-row = insert into css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.cn = insert into cn_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.ac = insert into ac_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.au = insert into au_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.lk = insert into lk_css_w ( rid {0} ) values ( ? {1} ) + +wide-block-find = select a.rid from css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.cn = select a.rid from cn_css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? 
and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.ac = select a.rid from ac_css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.au = select a.rid from au_css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.lk = select a.rid from lk_css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} + +wide-listchildren = select a.rid from css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.cn = select a.rid from cn_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.ac = select a.rid from ac_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.au = select a.rid from au_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.lk = select a.rid from lk_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} + +wide-countestimate = select count(*) from css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.cn = select count(*) from cn_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? 
and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.ac = select count(*) from ac_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.au = select count(*) from au_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.lk = select count(*) from lk_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.Oracle.ddl b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.Oracle.ddl new file mode 100644 index 00000000..cc2f4fb9 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.Oracle.ddl @@ -0,0 +1,170 @@ +########### DROP TABLE css cascade constraints; + +CREATE TABLE css ( + id NUMBER NOT NULL, + rid varchar2(32) NOT NULL, + cid varchar2(64) NOT NULL, + v varchar2(780) NOT NULL, + PRIMARY KEY (id)); + +CREATE SEQUENCE seq_css_id; + +# Oracle creates b-tree indices by default (I think) +CREATE INDEX css_rowkey ON css(rid,cid); +# Can't create an index on a substring of a field from what I can see. +# Something else may be intended with cid_locate_i by Ian. He can correct me. 
+CREATE INDEX css_cid_locate_i ON css(v,cid); + +############ DROP TABLE au_css cascade constraints; + +CREATE TABLE au_css ( + id NUMBER NOT NULL, + rid varchar2(32) NOT NULL, + cid varchar2(64) NOT NULL, + v varchar2(780) NOT NULL, + PRIMARY KEY (id)); + +CREATE SEQUENCE seq_au_css_id; + +CREATE INDEX au_css_rowkey ON au_css(rid,cid); +CREATE INDEX au_css_cid_locate_i ON au_css(v,cid); + +######### DROP TABLE cn_css cascade constraints; + +CREATE TABLE cn_css ( + id NUMBER NOT NULL, + rid varchar2(32) NOT NULL, + cid varchar2(64) NOT NULL, + v varchar2(780) NOT NULL, + PRIMARY KEY (id)) +; + +CREATE SEQUENCE seq_cn_css_id; + +CREATE INDEX cn_css_rowkey ON cn_css(rid,cid); +CREATE INDEX cn_css_cid_locate_i ON cn_css(v,cid); + +########### DROP TABLE ac_css cascade constraints; + +CREATE TABLE ac_css ( + id NUMBER NOT NULL, + rid varchar2(32) NOT NULL, + cid varchar2(64) NOT NULL, + v varchar2(780) NOT NULL, + PRIMARY KEY (id)) +; + +CREATE SEQUENCE seq_ac_css_id; + +CREATE INDEX ac_css_rowkey ON ac_css(rid,cid); +CREATE INDEX ac_css_cid_locate_i ON ac_css(v,cid); + +########### DROP TABLE lk_css cascade constraints; + +CREATE TABLE lk_css ( + id NUMBER NOT NULL, + rid varchar2(32) NOT NULL, + cid varchar2(64) NOT NULL, + v varchar2(780) NOT NULL, + PRIMARY KEY (id)) +; + +CREATE SEQUENCE seq_lk_css_id; + +CREATE INDEX lk_css_rowkey ON lk_css(rid,cid); +CREATE INDEX lk_css_cid_locate_i ON lk_css(v,cid); + +########### DROP TABLE css_w cascade constraints; + +CREATE TABLE css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + +########### DROP TABLE ac_css_w cascade constraints; + +CREATE TABLE ac_css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + +########### DROP TABLE au_css_w cascade constraints; + +CREATE TABLE au_css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + +########### DROP TABLE cn_css_w cascade constraints; + +CREATE TABLE cn_css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + +########### DROP TABLE lk_css_w 
cascade constraints; + +CREATE TABLE lk_css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + + +########### DROP TABLE css_wr cascade constraints; + +CREATE TABLE css_wr ( + id NUMBER NOT NULL, + cf varchar2(32) NOT NULL, + cid varchar2(64) NOT NULL, + cname varchar2(64) NOT NULL, + primary key(id)); + + +CREATE SEQUENCE seq_css_wr_id; + +CREATE UNIQUE INDEX css_wr_cid ON css_wr(cf,cid); +CREATE UNIQUE INDEX css_wr_cnam ON css_wr(cf,cname); + + +########### DROP TABLE css_b cascade constraints; + +CREATE TABLE css_b ( + rid varchar2(32) NOT NULL, + b blob, + PRIMARY KEY (rid) ) +; + +########## DROP TABLE cn_css_b cascade constraints; + +CREATE TABLE cn_css_b ( + rid varchar2(32) NOT NULL, + b blob, + PRIMARY KEY (rid) ) +; + +########### DROP TABLE au_css_b cascade constraints; + +CREATE TABLE au_css_b ( + rid varchar2(32) NOT NULL, + b blob, + PRIMARY KEY (rid) ) +; + +########### DROP TABLE ac_css_b cascade constraints; + +CREATE TABLE ac_css_b ( + rid varchar2(32) NOT NULL, + b blob, + PRIMARY KEY (rid) ) +; + + +########### DROP TABLE lk_css_b cascade constraints; + +CREATE TABLE lk_css_b ( + rid varchar2(32) NOT NULL, + b blob, + PRIMARY KEY (rid) ) +; + + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.Oracle.sql b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.Oracle.sql new file mode 100644 index 00000000..f72c2872 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.Oracle.sql @@ -0,0 +1,170 @@ +# SQL statements of the form key[.keyspace.columnfamily.[rowID0-2]] +# the based key should always be present +# the keyspace.columnfamily selectors are used to shard the column family (optional) +# the rowID0-2 is to shard on rowID, you can selectively shard hot rowID areas. 
+# If sharding ensure that any existing data is migrated (using SQL DML) and that the finder statements are adjusted to incorporate the shards (warning, might be hard) +# Indexer statements +delete-string-row = delete from css where rid = ? +delete-string-row.n.ac = delete from ac_css where rid = ? +delete-string-row.n.au = delete from au_css where rid = ? +delete-string-row.n.cn = delete from cn_css where rid = ? +delete-string-row.n.lk = delete from lk_css where rid = ? +select-string-row = select cid, v from css where rid = ? +select-string-row.n.ac = select cid, v from ac_css where rid = ? +select-string-row.n.au = select cid, v from au_css where rid = ? +select-string-row.n.cn = select cid, v from cn_css where rid = ? +select-string-row.n.lk = select cid, v from lk_css where rid = ? +insert-string-column = insert into css (id, v, rid, cid) values (seq_css_id.NEXTVAL, ?, ?, ? ) +insert-string-column.n.ac = insert into ac_css (id, v, rid, cid) values (seq_ac_css_id.NEXTVAL, ?, ?, ? ) +insert-string-column.n.au = insert into au_css (id, v, rid, cid) values (seq_au_css_id.NEXTVAL, ?, ?, ? ) +insert-string-column.n.cn = insert into cn_css (id, v, rid, cid) values (seq_cn_css_id.NEXTVAL, ?, ?, ? ) +insert-string-column.n.lk = insert into lk_css (id, v, rid, cid) values (seq_lk_css_id.NEXTVAL, ?, ?, ? ) +update-string-column = update css set v = ? where rid = ? and cid = ? +update-string-column.n.ac = update ac_css set v = ? where rid = ? and cid = ? +update-string-column.n.au = update au_css set v = ? where rid = ? and cid = ? +update-string-column.n.cn = update cn_css set v = ? where rid = ? and cid = ? +update-string-column.n.lk = update lk_css set v = ? where rid = ? and cid = ? +remove-string-column = delete from css where rid = ? and cid = ? +remove-string-column.n.ac = delete from ac_css where rid = ? and cid = ? +remove-string-column.n.au = delete from au_css where rid = ? and cid = ? +remove-string-column.n.cn = delete from cn_css where rid = ? and cid = ?
+remove-string-column.n.lk = delete from lk_css where rid = ? and cid = ? +# Example of a sharded query, rowIDs starting with x will use this +### remove-string-column.n.cn._X = delete from cn_css_X where rid = ? and cid = ? + +# base statement with paging ; table join ; where clause ; where clause for sort field (if needed) ; order by clause +find.n.au = select TR.rid, TR.cid, TR.v from (select a.rid, a.cid, a.v, ROWNUM rnum from au_css a {0} where {1} 1 = 1 {2}) TR where rnum > {4,number,#} and rnum <= {3,number,#}+{4,number,#};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.ac = select TR.rid, TR.cid, TR.v from (select a.rid, a.cid, a.v, ROWNUM rnum from ac_css a {0} where {1} 1 = 1 {2}) TR where rnum > {4,number,#} and rnum <= {3,number,#}+{4,number,#};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.cn = select TR.rid, TR.cid, TR.v from (select a.rid, a.cid, a.v, ROWNUM rnum from cn_css a {0} where {1} 1 = 1 {2}) TR where rnum > {4,number,#} and rnum <= {3,number,#}+{4,number,#};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.lk = select TR.rid, TR.cid, TR.v from (select a.rid, a.cid, a.v, ROWNUM rnum from lk_css a {0} where {1} 1 = 1 {2}) TR where rnum > {4,number,#} and rnum <= {3,number,#}+{4,number,#};, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} + +block-select-row = select b from css_b where rid = ? +block-delete-row = delete from css_b where rid = ? +block-insert-row = insert into css_b (rid,b) values (?, ?) +block-update-row = update css_b set b = ? where rid = ? +list-all = select rid, b from css_b +list-all-count = select count(*) from css_b + +block-select-row.n.ac = select b from ac_css_b where rid = ? +block-delete-row.n.ac = delete from ac_css_b where rid = ?
+block-insert-row.n.ac = insert into ac_css_b (rid,b) values (?, ?) +block-update-row.n.ac = update ac_css_b set b = ? where rid = ? +list-all.n.ac = select rid, b from ac_css_b +list-all-count.n.ac = select count(*) from ac_css_b + +block-select-row.n.cn = select b from cn_css_b where rid = ? +block-delete-row.n.cn = delete from cn_css_b where rid = ? +block-insert-row.n.cn = insert into cn_css_b (rid,b) values (?, ?) +block-update-row.n.cn = update cn_css_b set b = ? where rid = ? +list-all.n.cn = select rid, b from cn_css_b +list-all-count.n.cn = select count(*) from cn_css_b + +block-select-row.n.au = select b from au_css_b where rid = ? +block-delete-row.n.au = delete from au_css_b where rid = ? +block-insert-row.n.au = insert into au_css_b (rid,b) values (?, ?) +block-update-row.n.au = update au_css_b set b = ? where rid = ? +list-all.n.au = select rid, b from au_css_b +list-all-count.n.au = select count(*) from au_css_b + +block-select-row.n.lk = select b from lk_css_b where rid = ? +block-delete-row.n.lk = delete from lk_css_b where rid = ? +block-insert-row.n.lk = insert into lk_css_b (rid,b) values (?, ?) +block-update-row.n.lk = update lk_css_b set b = ? where rid = ? +list-all.n.lk = select rid, b from lk_css_b +list-all-count.n.lk = select count(*) from lk_css_b + +# base statement with paging ; table join ; where clause ; where clause for sort field (if needed) ; order by clause; sort field column( if needed) +## the subselect in the paging statement is required by Oracle to do paging. http://www.oracle.com/technetwork/issue-archive/2006/06-sep/o56asktom-086197.html +block-find = select TR.rid from ( select s.rid, ROWNUM rnum from (select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2}) s where ROWNUM <= {3,number,#}+{4,number,#}) TR where rnum > {4,number,#};, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ?
and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +block-find.n.au = select TR.rid from ( select s.rid, ROWNUM rnum from (select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2}) s where ROWNUM <= {3,number,#}+{4,number,#}) TR where rnum > {4,number,#};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +block-find.n.cn = select TR.rid from ( select s.rid, ROWNUM rnum from (select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2}) s where ROWNUM <= {3,number,#}+{4,number,#}) TR where rnum > {4,number,#};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +block-find.n.ac = select TR.rid from ( select s.rid, ROWNUM rnum from (select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2}) s where ROWNUM <= {3,number,#}+{4,number,#}) TR where rnum > {4,number,#};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +block-find.n.lk = select TR.rid from ( select s.rid, ROWNUM rnum from (select distinct a.rid {5} from lk_css a {0} where {1} 1 = 1 {2}) s where ROWNUM <= {3,number,#}+{4,number,#}) TR where rnum > {4,number,#};, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v + +listchildren = select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2};, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +listchildren.n.au = select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +listchildren.n.cn = select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? 
and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +listchildren.n.ac = select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +listchildren.n.lk = select distinct a.rid {5} from lk_css a {0} where {1} 1 = 1 {2};, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v + +countestimate = select count(*) from (select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2});, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +countestimate.n.au = select count(*) from (select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2});, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +countestimate.n.cn = select count(*) from (select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2});, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +countestimate.n.ac = select count(*) from (select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2});, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +countestimate.n.lk = select count(*) from (select distinct a.rid {5} from lk_css a {0} where {1} 1 = 1 {2});, lk_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v + + +# statement to validate the connection +validate = select 1 from DUAL + +# What type of rowID has should be used. Must be non colliding (reasonable probability), cant be changed once set without data migration. +# SHA-1 has a 1:10E14 probability of collision, so IMVHO is Ok here. Do not use MD5, it will collide. 
+rowid-hash = SHA1 + +# statement to check that the schema exists +check-schema = select count(*) from css + +# Use batch Inserts means that update operations will be performed as batches rather than single SQL statements. This only really effects the update of +# Index tables and not the content store but it will reduce the number of SQL operations where more than one field is indexed per content item. +use-batch-inserts = 1 + +# this property indicates which version of the JRE your JDBC driver targets +# e.g. the driver for Oracle 10g does not support JDBC methods introduced in JRE 1.6 +jdbc-support-level = 1.5 + +# Oracle (well Larry) decided that Oracle names would never need to be > 30 chars, so we need to make the index names short. +# This may cause clashes which will need further work. +sql-name-padding = false +sql-max-name-length = 25 + +index-column-name-select = select cf, cid, cname from css_wr +index-column-name-insert = insert into css_wr ( id, cf, cid, cname ) values ( seq_css_wr_id.NEXTVAL, ? , ? , ? ) +alter-widestring-table = ALTER TABLE {0}_css_w ADD {1} varchar(780) +index-widestring-table = CREATE INDEX {0}wr{1} ON {0}_css_w ({1}) + +exists-widestring-row = select rid from css_w where rid = ? +exists-widestring-row.n.cn = select rid from cn_css_w where rid = ? +exists-widestring-row.n.ac = select rid from ac_css_w where rid = ? +exists-widestring-row.n.au = select rid from au_css_w where rid = ? +exists-widestring-row.n.lk = select rid from lk_css_w where rid = ? + +delete-widestring-row = delete from css_w where rid = ? +delete-widestring-row.n.cn = delete from cn_css_w where rid = ? +delete-widestring-row.n.ac = delete from ac_css_w where rid = ? +delete-widestring-row.n.au = delete from au_css_w where rid = ? +delete-widestring-row.n.lk = delete from lk_css_w where rid = ? + +update-widestring-row = update css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.cn = update cn_css_w set {0} where rid = ?; {0} = ? 
+update-widestring-row.n.ac = update ac_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.au = update au_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.lk = update lk_css_w set {0} where rid = ?; {0} = ? + + +insert-widestring-row = insert into css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.cn = insert into cn_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.ac = insert into ac_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.au = insert into au_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.lk = insert into lk_css_w ( rid {0} ) values ( ? {1} ) + + +wide-block-find = select TR.rid from ( select s.rid, ROWNUM rnum from (select a.rid from css_w a where {0} {1} ) s where ROWNUM <= {2,number,#}+{3,number,#}) TR where rnum >= {3,number,#};a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.cn = select TR.rid from ( select s.rid, ROWNUM rnum from (select a.rid from cn_css_w a where {0} {1} ) s where ROWNUM <= {2,number,#}+{3,number,#}) TR where rnum >= {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.ac = select TR.rid from ( select s.rid, ROWNUM rnum from (select a.rid from ac_css_w a where {0} {1} ) s where ROWNUM <= {2,number,#}+{3,number,#}) TR where rnum >= {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.au = select TR.rid from ( select s.rid, ROWNUM rnum from (select a.rid from au_css_w a where {0} {1} ) s where ROWNUM <= {2,number,#}+{3,number,#}) TR where rnum >= {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? 
and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.lk = select TR.rid from ( select s.rid, ROWNUM rnum from (select a.rid from lk_css_w a where {0} {1} ) s where ROWNUM <= {2,number,#}+{3,number,#}) TR where rnum >= {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} + +wide-listchildren = select a.rid from css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.cn = select a.rid from cn_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.ac = select a.rid from ac_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.au = select a.rid from au_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.lk = select a.rid from lk_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} + +wide-countestimate = select count(*) from css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.cn = select count(*) from cn_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.ac = select count(*) from ac_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.au = select count(*) from au_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? 
and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.lk = select count(*) from lk_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} + + + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.PostgreSQL.ddl b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.PostgreSQL.ddl new file mode 100644 index 00000000..a30850a1 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.PostgreSQL.ddl @@ -0,0 +1,238 @@ +########### DROP TABLE css; + + +CREATE TABLE css +( + id serial, + rid character varying(32) NOT NULL, + cid character varying(64) NOT NULL, + v character varying(780) NOT NULL, + CONSTRAINT css_pk PRIMARY KEY (id) +); + +ALTER TABLE css OWNER TO nakamura; +GRANT ALL ON TABLE css TO nakrole; + +CREATE INDEX css_cid_locate_i ON css (v, cid); +CREATE INDEX css_rowkey ON css (rid, cid); + + + +############ DROP TABLE au_css; + + +CREATE TABLE au_css +( + id serial, + rid character varying(32) NOT NULL, + cid character varying(64) NOT NULL, + v character varying(780) NOT NULL, + CONSTRAINT au_css_pk PRIMARY KEY (id) +); + +ALTER TABLE au_css OWNER TO nakamura; +GRANT ALL ON TABLE au_css TO nakrole; + +CREATE INDEX au_css_cid_locate_i ON au_css (v, cid); +CREATE INDEX au_css_rowkey ON au_css (rid, cid); + + +######### DROP TABLE cn_css; + + + +CREATE TABLE cn_css +( + id serial, + rid character varying(32) NOT NULL, + cid character varying(64) NOT NULL, + v character varying(780) NOT NULL, + CONSTRAINT cn_css_pk PRIMARY KEY (id) +); + +ALTER TABLE cn_css OWNER TO nakamura; +GRANT ALL ON TABLE cn_css TO nakrole; + +CREATE INDEX cn_css_cid_locate_i ON cn_css (v, cid); +CREATE INDEX cn_css_rowkey ON cn_css (rid, cid); + + + +########### DROP TABLE ac_css; + + + +CREATE TABLE ac_css +( + id serial, + rid character varying(32) NOT NULL, + cid 
character varying(64) NOT NULL, + v character varying(780) NOT NULL, + CONSTRAINT ac_css_pk PRIMARY KEY (id) +); + +ALTER TABLE ac_css OWNER TO nakamura; +GRANT ALL ON TABLE ac_css TO nakrole; + +CREATE INDEX ac_css_cid_locate_i ON ac_css (v, cid); +CREATE INDEX ac_css_rowkey ON ac_css (rid, cid); + +########### DROP TABLE lk_css; + + + +CREATE TABLE lk_css +( + id serial, + rid character varying(32) NOT NULL, + cid character varying(64) NOT NULL, + v character varying(780) NOT NULL, + CONSTRAINT lk_css_pk PRIMARY KEY (id) +); + +ALTER TABLE lk_css OWNER TO nakamura; +GRANT ALL ON TABLE lk_css TO nakrole; + +CREATE INDEX lk_css_cid_locate_i ON lk_css (v, cid); +CREATE INDEX lk_css_rowkey ON lk_css (rid, cid); + + +CREATE TABLE css_w ( + rid varchar(32) NOT NULL, + constraint css_w_pk primary key(rid) +); + +ALTER TABLE css_w OWNER TO nakamura; +GRANT ALL ON TABLE css_w TO nakrole; + + +CREATE TABLE ac_css_w ( + rid varchar(32) NOT NULL, + constraint ac_css_w_pk primary key(rid) +); + +ALTER TABLE ac_css_w OWNER TO nakamura; +GRANT ALL ON TABLE ac_css_w TO nakrole; + + +CREATE TABLE au_css_w ( + rid varchar(32) NOT NULL, + constraint au_css_w_pk primary key(rid) +); + +ALTER TABLE au_css_w OWNER TO nakamura; +GRANT ALL ON TABLE au_css_w TO nakrole; + + +CREATE TABLE cn_css_w ( + rid varchar(32) NOT NULL, + constraint cn_css_w_pk primary key(rid) +); + +ALTER TABLE cn_css_w OWNER TO nakamura; +GRANT ALL ON TABLE cn_css_w TO nakrole; + +CREATE TABLE lk_css_w ( + rid varchar(32) NOT NULL, + constraint lk_css_w_pk primary key(rid) +); + +ALTER TABLE lk_css_w OWNER TO nakamura; +GRANT ALL ON TABLE lk_css_w TO nakrole; + + +CREATE TABLE css_wr ( + id serial, + cf varchar(32) NOT NULL, + cid varchar(64) NOT NULL, + cname varchar(64) NOT NULL, + constraint css_wr_pk primary key(id), + constraint css_wr_cid unique (cf,cid), + constraint css_wr_cnam unique (cf,cname) +); + +ALTER TABLE css_wr OWNER TO nakamura; +GRANT ALL ON TABLE css_wr TO nakrole; + + +########### DROP 
TABLE css_b; + + +CREATE TABLE css_b ( + id serial, + rid character varying(32) NOT NULL, + b bytea, + CONSTRAINT css_b_pk PRIMARY KEY (id), + CONSTRAINT css_b_rid_uk UNIQUE (rid) +); + +ALTER TABLE css_b OWNER TO nakamura; +GRANT ALL ON TABLE css_b TO nakrole; + + +########## DROP TABLE cn_css_b; + + +CREATE TABLE cn_css_b +( + id serial, + rid character varying(32) NOT NULL, + b bytea, + CONSTRAINT cn_css_b_pk PRIMARY KEY (id), + CONSTRAINT cn_css_b_rid_uk UNIQUE (rid) +); + +ALTER TABLE cn_css_b OWNER TO nakamura; +GRANT ALL ON TABLE cn_css_b TO nakrole; + + + +########### DROP TABLE au_css_b; + + +CREATE TABLE au_css_b +( + id serial, + rid character varying(32) NOT NULL, + b bytea, + CONSTRAINT au_css_b_pk PRIMARY KEY (id), + CONSTRAINT au_css_b_rid_uk UNIQUE (rid) +); + +ALTER TABLE au_css_b OWNER TO nakamura; +GRANT ALL ON TABLE au_css_b TO nakrole; + + + + +########### DROP TABLE ac_css_b; + + +CREATE TABLE ac_css_b +( + id serial, + rid character varying(32) NOT NULL, + b bytea, + CONSTRAINT ac_css_b_pk PRIMARY KEY (id), + CONSTRAINT ac_css_b_rid_uk UNIQUE (rid) +); + +ALTER TABLE ac_css_b OWNER TO nakamura; +GRANT ALL ON TABLE ac_css_b TO nakrole; + +########### DROP TABLE lk_css_b; + + +CREATE TABLE lk_css_b +( + id serial, + rid character varying(32) NOT NULL, + b bytea, + CONSTRAINT lk_css_b_pk PRIMARY KEY (id), + CONSTRAINT lk_css_b_rid_uk UNIQUE (rid) +); + +ALTER TABLE lk_css_b OWNER TO nakamura; +GRANT ALL ON TABLE lk_css_b TO nakrole; + + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.PostgreSQL.sql b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.PostgreSQL.sql new file mode 100644 index 00000000..fe3d43f4 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.PostgreSQL.sql @@ -0,0 +1,152 @@ +# SQL statements of the form key[.keyspace.columnfamily.[rowID0-2]] +# the based key should always be 
present +# the keyspace.columnfamily selectors are used to shard the column family (optional) +# the rowID0-2 is to shard on rowID, you can selectively shard hot rowID areas. +# If sharding ensure that any exiting data is migrated (using SQL DML) and that the finder statements are adjusted to incorporate the shards (warning, might be hard) +# Indexer statements +delete-string-row = delete from css where rid = ? +delete-string-row.n.ac = delete from ac_css where rid = ? +delete-string-row.n.au = delete from au_css where rid = ? +delete-string-row.n.cn = delete from cn_css where rid = ? +select-string-row = select cid, v from css where rid = ? +select-string-row.n.ac = select cid, v from ac_css where rid = ? +select-string-row.n.au = select cid, v from au_css where rid = ? +select-string-row.n.cn = select cid, v from cn_css where rid = ? +insert-string-column = insert into css (v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.ac = insert into ac_css (v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.au = insert into au_css (v, rid, cid) values ( ?, ?, ? ) +insert-string-column.n.cn = insert into cn_css (v, rid, cid) values ( ?, ?, ? ) +update-string-column = update css set v = ? where rid = ? and cid = ? +update-string-column.n.ac = update ac_css set v = ? where rid = ? and cid = ? +update-string-column.n.au = update au_css set v = ? where rid = ? and cid = ? +update-string-column.n.cn = update cn_css set v = ? where rid = ? and cid = ? +remove-string-column = delete from css where rid = ? and cid = ? +remove-string-column.n.ac = delete from ac_css where rid = ? and cid = ? +remove-string-column.n.au = delete from au_css where rid = ? and cid = ? +remove-string-column.n.cn = delete from cn_css where rid = ? and cid = ? +# Example of a sharded query, rowIDs starting with x will use this +### remove-string-column.n.cn._X = delete from cn_css_X where rid = ? and cid = ? 
+ +# base statement with paging ; table join ; where clause ; where clause for sort field (if needed) ; order by clause +find.n.au = select a.rid, a.cid, a.v from au_css where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.ac = select a.rid, a.cid, a.v from ac_css where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} +find.n.cn = select a.rid, a.cid, a.v from cn_css where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} + +block-select-row = select b from css_b where rid = ? +block-delete-row = delete from css_b where rid = ? +block-insert-row = insert into css_b (rid,b) values (?, ?) +block-update-row = update css_b set b = ? where rid = ? +list-all = select rid, b from css_b +list-all-count = select count(*) from css_b + +block-select-row.n.ac = select b from ac_css_b where rid = ? +block-delete-row.n.ac = delete from ac_css_b where rid = ? +block-insert-row.n.ac = insert into ac_css_b (rid,b) values (?, ?) +block-update-row.n.ac = update ac_css_b set b = ? where rid = ? +list-all.n.ac = select rid, b from ac_css_b +list-all-count.n.ac = select count(*) from ac_css_b + +block-select-row.n.cn = select b from cn_css_b where rid = ? +block-delete-row.n.cn = delete from cn_css_b where rid = ? +block-insert-row.n.cn = insert into cn_css_b (rid,b) values (?, ?) +block-update-row.n.cn = update cn_css_b set b = ? where rid = ? +list-all.n.cn = select rid, b from cn_css_b +list-all-count.n.cn = select count(*) from cn_css_b + +block-select-row.n.au = select b from au_css_b where rid = ? +block-delete-row.n.au = delete from au_css_b where rid = ? 
+block-insert-row.n.au = insert into au_css_b (rid,b) values (?, ?) +block-update-row.n.au = update au_css_b set b = ? where rid = ? +list-all.n.au = select rid, b from au_css_b +list-all-count.n.au = select count(*) from au_css_b + +# base statement with paging ; table join ; where clause ; where clause for sort field (if needed) ; order by clause; sort field column( if needed) +## the subselect in the paging statement is required by Oracle to do paging. http://www.oracle.com/technetwork/issue-archive/2006/06-sep/o56asktom-086197.html +block-find = select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +block-find.n.au = select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +block-find.n.cn = select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +block-find.n.ac = select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2} limit {3,number,#} offset {4,number,#};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v + +listchildren = select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2};, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +listchildren.n.au = select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2};, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? 
and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +listchildren.n.cn = select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2};, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +listchildren.n.ac = select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2};, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v + +countestimate = select count(*) from (select distinct a.rid {5} from css a {0} where {1} 1 = 1 {2}) as tocount;, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +countestimate.n.au = select count(*) from (select distinct a.rid {5} from au_css a {0} where {1} 1 = 1 {2}) as tocount;, au_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +countestimate.n.cn = select count(*) from (select distinct a.rid {5} from cn_css a {0} where {1} 1 = 1 {2}) as tocount;, cn_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v +countestimate.n.ac = select count(*) from (select distinct a.rid {5} from ac_css a {0} where {1} 1 = 1 {2}) as tocount;, ac_css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid ; {0}.cid = ? and {0}.rid = a.rid ; order by {0}.v {1} ;, {0}.v + + +# statement to validate the connection +validate = select 1 + +# What type of rowID has should be used. Must be non colliding (reasonable probability), cant be changed once set without data migration. +# SHA-1 has a 1:10E14 probability of collision, so IMVHO is Ok here. Do not use MD5, it will collide. +rowid-hash = SHA1 + +# statement to check that the schema exists +check-schema = select count(*) from css + +# Use batch Inserts means that update operations will be performed as batches rather than single SQL statements. 
This only really effects the update of +# Index tables and not the content store but it will reduce the number of SQL operations where more than one field is indexed per content item. +use-batch-inserts = 1 + +# this property indicates which version of the JRE your JDBC driver targets +# e.g. the driver for Postgres 9 does not support JDBC methods introduced in JRE 1.6 +jdbc-support-level = 1.5 + +# +# PostgreSQL does not like to perform insert, fail, update operations. It wants update if missing insert. +sql-statement-sequence = updateFirst + + +index-column-name-select = select cf, cid, cname from css_wr +index-column-name-insert = insert into css_wr ( cf, cid, cname ) values ( ? , ? , ? ) +alter-widestring-table = ALTER TABLE {0}_css_w ADD {1} varchar(780) +index-widestring-table = CREATE INDEX {0}_css_w_{1} ON {0}_css_w ({1}) + +exists-widestring-row = select rid from css_w where rid = ? +exists-widestring-row.n.cn = select rid from cn_css_w where rid = ? +exists-widestring-row.n.ac = select rid from ac_css_w where rid = ? +exists-widestring-row.n.au = select rid from au_css_w where rid = ? +exists-widestring-row.n.lk = select rid from lk_css_w where rid = ? + +delete-widestring-row = delete from css_w where rid = ? +delete-widestring-row.n.cn = delete from cn_css_w where rid = ? +delete-widestring-row.n.ac = delete from ac_css_w where rid = ? +delete-widestring-row.n.au = delete from au_css_w where rid = ? +delete-widestring-row.n.lk = delete from lk_css_w where rid = ? + +update-widestring-row = update css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.cn = update cn_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.ac = update ac_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.au = update au_css_w set {0} where rid = ?; {0} = ? +update-widestring-row.n.lk = update lk_css_w set {0} where rid = ?; {0} = ? + + +insert-widestring-row = insert into css_w ( rid {0} ) values ( ? 
{1} ) +insert-widestring-row.n.cn = insert into cn_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.ac = insert into ac_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.au = insert into au_css_w ( rid {0} ) values ( ? {1} ) +insert-widestring-row.n.lk = insert into lk_css_w ( rid {0} ) values ( ? {1} ) + + +wide-block-find = select a.rid from css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.cn = select a.rid from cn_css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.ac = select a.rid from ac_css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.au = select a.rid from au_css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-block-find.n.lk = select a.rid from lk_css_w a where {0} {1} limit {2,number,#} offset {3,number,#} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} + +wide-listchildren = select a.rid from css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.cn = select a.rid from cn_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.ac = select a.rid from ac_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? 
and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.au = select a.rid from au_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-listchildren.n.lk = select a.rid from lk_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} + +wide-countestimate = select count(*) from css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.cn = select count(*) from cn_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from cn_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.ac = select count(*) from ac_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from ac_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.au = select count(*) from au_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from au_css {0} where {1} );{0}.cid = ? and {0}.v = ?;order by {0};{0} {1} +wide-countestimate.n.lk = select count(*) from lk_css_w a where {0} {1} ;a.{0} = ?;a.rid in ( select {0}.rid from lk_css {0} where {1} );{0}.cid = ? 
and {0}.v = ?;order by {0};{0} {1} + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/Issue125-MySQL.sql b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/Issue125-MySQL.sql new file mode 100644 index 00000000..409392d8 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/Issue125-MySQL.sql @@ -0,0 +1,8 @@ +# Fixes https://github.com/ieb/sparsemapcontent/issues/125 migrating data for existing schema + +ALTER TABLE `css_b` MODIFY `b` mediumblob; +ALTER TABLE `cn_css_b` MODIFY `b` mediumblob; +ALTER TABLE `au_css_b` MODIFY `b` mediumblob; +ALTER TABLE `ac_css_b` MODIFY `b` mediumblob; +ALTER TABLE `lk_css_b` MODIFY `b` mediumblob; + diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/KERN-2005-PostgreSQL.sql b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/KERN-2005-PostgreSQL.sql new file mode 100644 index 00000000..cd7e852c --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/KERN-2005-PostgreSQL.sql @@ -0,0 +1,7 @@ + +# This is only for postgresql instances. 
+ +alter table css_b add CONSTRAINT css_b_rid_uk UNIQUE (rid); +alter table au_css_b add CONSTRAINT au_css_b_rid_uk UNIQUE (rid); +alter table ac_css_b add CONSTRAINT ac_css_b_rid_uk UNIQUE (rid); +alter table cn_css_b add CONSTRAINT cn_css_b_rid_uk UNIQUE (rid); \ No newline at end of file diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/README.txt b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/README.txt new file mode 100644 index 00000000..8b05ebaa --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/README.txt @@ -0,0 +1,3 @@ +In this folder you should put upgrade scripts. +They should be named by date and feature and contain the SQL statements to be run against a database to perform an upgrade on the database. +If its not possible to express the action in SQL, then express the action in words. \ No newline at end of file diff --git a/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/v1.0--v1.3.sql b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/v1.0--v1.3.sql new file mode 100644 index 00000000..1212e701 --- /dev/null +++ b/drivers/jdbc/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/upgrades/v1.0--v1.3.sql @@ -0,0 +1,75 @@ +-- WRITTEN FOR ORACLE +-- DROP TABLE lk_css cascade constraints; + +CREATE TABLE lk_css ( + id NUMBER NOT NULL, + rid varchar2(32) NOT NULL, + cid varchar2(64) NOT NULL, + v varchar2(780) NOT NULL, + PRIMARY KEY (id)) +; + +CREATE SEQUENCE seq_lk_css_id; + +CREATE INDEX lk_css_rowkey ON lk_css(rid,cid); +CREATE INDEX lk_css_cid_locate_i ON lk_css(v,cid); + +-- DROP TABLE css_w cascade constraints; + +CREATE TABLE css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + +-- DROP TABLE ac_css_w cascade constraints; + +CREATE TABLE ac_css_w ( + rid varchar2(32) NOT NULL, 
+ PRIMARY KEY(rid)) +; + +-- DROP TABLE au_css_w cascade constraints; + +CREATE TABLE au_css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + +-- DROP TABLE cn_css_w cascade constraints; + +CREATE TABLE cn_css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + +-- DROP TABLE lk_css_w cascade constraints; + +CREATE TABLE lk_css_w ( + rid varchar2(32) NOT NULL, + PRIMARY KEY(rid)) +; + + +-- DROP TABLE css_wr cascade constraints; + +CREATE TABLE css_wr ( + id NUMBER NOT NULL, + cf varchar2(32) NOT NULL, + cid varchar2(64) NOT NULL, + cname varchar2(64) NOT NULL, + primary key(id)); + + +CREATE SEQUENCE seq_css_wr_id; + +CREATE UNIQUE INDEX css_wr_cid ON css_wr(cf,cid); +CREATE UNIQUE INDEX css_wr_cnam ON css_wr(cf,cname); + + +-- DROP TABLE lk_css_b cascade constraints; + +CREATE TABLE lk_css_b ( + rid varchar2(32) NOT NULL, + b blob, + PRIMARY KEY (rid) ) +; diff --git a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AccessControlManagerImplMan.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AccessControlManagerImplTest.java similarity index 71% rename from src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AccessControlManagerImplMan.java rename to drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AccessControlManagerImplTest.java index 6fb17cc8..39c001a2 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AccessControlManagerImplMan.java +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AccessControlManagerImplTest.java @@ -17,14 +17,15 @@ */ package org.sakaiproject.nakamura.lite.jdbc.mysql; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.accesscontrol.AbstractAccessControlManagerImplTest; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; -public class AccessControlManagerImplMan extends 
AbstractAccessControlManagerImplTest { +public class AccessControlManagerImplTest extends AbstractAccessControlManagerImplTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { - return MysqlSetup.getClientPool(); + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return MysqlSetup.getClientPool(configuration); } } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AuthorizableManagerImplMan.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AuthorizableManagerImplTest.java similarity index 71% rename from src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AuthorizableManagerImplMan.java rename to drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AuthorizableManagerImplTest.java index 9894b936..a00bc9ac 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AuthorizableManagerImplMan.java +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/AuthorizableManagerImplTest.java @@ -17,14 +17,15 @@ */ package org.sakaiproject.nakamura.lite.jdbc.mysql; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.authorizable.AbstractAuthorizableManagerImplTest; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; -public class AuthorizableManagerImplMan extends AbstractAuthorizableManagerImplTest { +public class AuthorizableManagerImplTest extends AbstractAuthorizableManagerImplTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { - return MysqlSetup.getClientPool(); + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return MysqlSetup.getClientPool(configuration); } } diff --git 
a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerFinderImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerFinderImplTest.java new file mode 100644 index 00000000..c7ff2739 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerFinderImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.jdbc.mysql; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerFinderTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class ContentManagerFinderImplTest extends AbstractContentManagerFinderTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return MysqlSetup.getClientPool(configuration); + } + +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerManagerImplMan.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerImplTest.java similarity index 72% rename from src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerManagerImplMan.java rename to drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerImplTest.java index bbb4ac87..32657ac4 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerManagerImplMan.java +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/ContentManagerImplTest.java @@ -17,14 +17,15 @@ */ package org.sakaiproject.nakamura.lite.jdbc.mysql; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.lite.content.AbstractContentManagerTest; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; -public class ContentManagerManagerImplMan extends AbstractContentManagerTest { +public class ContentManagerImplTest extends AbstractContentManagerTest { @Override - protected StorageClientPool getClientPool() throws ClassNotFoundException { - return MysqlSetup.getClientPool(); + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return MysqlSetup.getClientPool(configuration); } } diff --git 
a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/KeyValueRowsMain.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/KeyValueRowsMain.java new file mode 100644 index 00000000..bc963109 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/KeyValueRowsMain.java @@ -0,0 +1,275 @@ +package org.sakaiproject.nakamura.lite.jdbc.mysql; + +import java.io.File; +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.io.FileUtils; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +public class KeyValueRowsMain { + + private Connection connection; + private String[] dictionary; + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public KeyValueRowsMain() { + System.err.println(this.getClass().getName()); + } + + public void deleteDb(String file) { + FileUtils.deleteQuietly(new File(file)); + } + + public void open() throws SQLException { + connection = DriverManager + .getConnection("jdbc:mysql://127.0.0.1:3306/sakai22?useUnicode=true&characterEncoding=UTF-8", "sakai22", "sakai22"); + connection.setAutoCommit(false); + } + + public void createTables(int columns) throws SQLException { + Statement s = connection.createStatement(); + s.execute("DROP TABLE IF EXISTS cn_css_kv"); + StringBuilder sql = new StringBuilder(); + sql.append("CREATE TABLE cn_css_kv ("); + sql.append("`id` INT NOT NULL AUTO_INCREMENT,"); + sql.append("`rid` varchar(32) NOT NULL,"); + sql.append("`cid` varchar(64) NOT NULL,"); + sql.append("`v` 
varchar(780),"); + sql.append("primary key(`id`), KEY `rowkey` (`rid`,`cid`), KEY `cid_locate_i` (`v`(255),`cid`)) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci"); + s.execute(sql.toString()); + s.close(); + } + + public void populateDictionary(int size) throws NoSuchAlgorithmException, + UnsupportedEncodingException { + dictionary = new String[size]; + MessageDigest md = MessageDigest.getInstance("SHA1"); + for (int i = 0; i < size; i++) { + dictionary[i] = Base64.encodeBase64URLSafeString(md.digest(String.valueOf("Dict" + i) + .getBytes("UTF-8"))); + } + } + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public void loadTable(int columns, int records) throws SQLException, + UnsupportedEncodingException, NoSuchAlgorithmException { + StringBuilder sb = new StringBuilder(); + sb.append("insert into cn_css_kv (rid, cid, v) values ( ?, ?, ?)"); + PreparedStatement p = null; + Statement s = null; + ResultSet rs = null; + try { + p = connection.prepareStatement(sb.toString()); + s = connection.createStatement(); + MessageDigest sha1 = MessageDigest.getInstance("SHA1"); + SecureRandom sr = new SecureRandom(); + long cs = System.currentTimeMillis(); + rs = s.executeQuery( + "select count(*) from cn_css_kv"); + rs.next(); + int nrows = rs.getInt(1); + for (int i = 0 + nrows; i < records + nrows; i++) { + String rid = Base64.encodeBase64URLSafeString(sha1.digest(String + .valueOf("TEST" + i).getBytes("UTF-8"))); + for (int j = 0; j < columns; j++) { + if (sr.nextBoolean()) { + p.clearParameters(); + p.setString(1, rid); + p.setString(2, "v" + j); + p.setString(3, dictionary[sr.nextInt(dictionary.length)]); + p.execute(); + } + } + + if (i % 500 == 0) { + connection.commit(); + long ct = System.currentTimeMillis(); + System.err.print(""+i+","+(ct-cs)+","); + testSelect(2, 0, columns, 5000,true); + cs = System.currentTimeMillis(); + } + } + } finally { + if (rs != null) { + try { + rs.close(); + } catch (SQLException e 
) { + + } + } + if (p != null) { + try { + p.close(); + } catch (SQLException e ) { + + } + } + if (s != null) { + try { + s.close(); + } catch (SQLException e ) { + + } + } + } + } + + private void close() throws SQLException { + connection.close(); + } + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public void testSelect(int ncols, int sorts, int columns, long timeToLive, boolean csv) throws SQLException { + StringBuilder sb = new StringBuilder(); + SecureRandom sr = new SecureRandom(); + Set used = new LinkedHashSet(); + while (used.size() < ncols) { + int c = sr.nextInt(columns); + if (!used.contains(c)) { + used.add(c); + } + } + Integer[] cnums = used.toArray(new Integer[ncols]); + sb.append("select distinct a.rid "); + if (sorts > 0) { + for (int i = 0; i < sorts; i++) { + sb.append(", s").append(i).append(".v "); + } + } + sb.append(" from cn_css_kv a "); + for (int i = 0; i < ncols; i++) { + sb.append(" , cn_css_kv a").append(i); + } + if (sorts > 0) { + for (int i = 0; i < sorts; i++) { + sb.append(" , cn_css_kv s").append(i); + } + } + sb.append(" where "); + for (int i = 0; i < ncols; i++) { + sb.append(" a").append(i).append(".cid = ? AND a").append(i).append(".v = ? AND a") + .append(i).append(".rid = a.rid AND "); + } + if (sorts > 0) { + for (int i = 0; i < sorts; i++) { + sb.append("s").append(i).append(".cid = ? 
AND a.rid = s").append(i) + .append(".rid AND "); + } + } + sb.append(" 1 = 1 "); + Integer[] snums = null; + if (sorts > 0) { + sb.append(" order by "); + used.clear(); + while (used.size() < sorts) { + int c = sr.nextInt(columns); + if (!used.contains(c)) { + used.add(c); + } + } + snums = used.toArray(new Integer[ncols]); + for (int i = 0; i < sorts - 1; i++) { + sb.append("s").append(i).append(".v ,"); + } + sb.append("s").append(sorts - 1).append(".v "); + } + if ( !csv ) { + System.err.println(sb.toString()); + } + PreparedStatement p = null; + ResultSet rs = null; + long atstart = System.currentTimeMillis(); + int arows = 0; + int nq = 0; + try { + p = connection.prepareStatement(sb.toString()); + long endTestTime = atstart + timeToLive; + while (System.currentTimeMillis() < endTestTime) { + p.clearParameters(); + for (int i = 0; i < ncols; i++) { + p.setString(i * 2 + 1, "v" + cnums[i]); + p.setString(i * 2 + 2, dictionary[sr.nextInt(dictionary.length)]); + } + if (sorts > 0) { + for (int i = 0; i < sorts; i++) { + p.setString(i + 1 + ncols * 2, "s" + snums[i]); + } + } + rs = p.executeQuery(); + int rows = 0; + while (rs.next()) { + rows++; + } + arows += rows; + nq++; + rs.close(); + } + } finally { + if (rs != null) { + try { + rs.close(); + } catch (SQLException e) { + + } + } + if (p != null) { + try { + p.close(); + } catch (SQLException e) { + + } + } + } + double t = System.currentTimeMillis() - atstart; + double a = t / nq; + if ( csv ) { + System.err.println("" + (arows / nq) + "," + a ); + } else { + System.err.println("Found " + arows + " in " + t + "ms executed " + nq + " queries"); + System.err.println("Average " + (arows / nq) + " in " + a + "ms"); + } + + } + + public static void main(String[] argv) throws SQLException, NoSuchAlgorithmException, + UnsupportedEncodingException { + KeyValueRowsMain tmr = new KeyValueRowsMain(); + tmr.open(); + tmr.createTables(30); + tmr.populateDictionary(20); + tmr.loadTable(30, 10000); + tmr.testSelect(1, 
0, 30, 5000); + tmr.testSelect(2, 0, 30, 5000); + tmr.testSelect(3, 0, 30, 5000); + tmr.testSelect(4, 0, 30, 5000); + tmr.testSelect(5, 0, 30, 5000); + tmr.testSelect(1, 1, 30, 5000); + tmr.testSelect(2, 1, 30, 5000); + tmr.testSelect(3, 1, 30, 5000); + tmr.testSelect(4, 1, 30, 5000); + tmr.testSelect(5, 1, 30, 5000); + tmr.testSelect(1, 2, 30, 5000); + tmr.testSelect(2, 2, 30, 5000); + tmr.testSelect(3, 2, 30, 5000); + tmr.testSelect(4, 2, 30, 5000); + tmr.testSelect(5, 2, 30, 5000); + tmr.close(); + } + private void testSelect(int i, int j, int k, int l) throws SQLException { + testSelect(i, j, k, l, false); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/LockManagerImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/LockManagerImplTest.java new file mode 100644 index 00000000..712f267c --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/LockManagerImplTest.java @@ -0,0 +1,14 @@ +package org.sakaiproject.nakamura.lite.jdbc.mysql; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.lock.AbstractLockManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class LockManagerImplTest extends AbstractLockManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return MysqlSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/MultiRowsMain.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/MultiRowsMain.java new file mode 100644 index 00000000..ad408db4 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/MultiRowsMain.java @@ -0,0 +1,217 @@ +package org.sakaiproject.nakamura.lite.jdbc.mysql; + +import java.io.File; +import java.io.UnsupportedEncodingException; 
package org.sakaiproject.nakamura.lite.jdbc.mysql;

import java.io.File;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Base64;
import java.util.LinkedHashSet;
import java.util.Set;

import org.apache.commons.io.FileUtils;

/**
 * Ad-hoc benchmark of the "wide row" schema (one indexed varchar column per
 * property) against a local MySQL instance. Not a unit test: run via
 * {@link #main(String[])} with MySQL listening on 127.0.0.1:3306 and a
 * sakai22/sakai22 account.
 */
public class MultiRowsMain {

    /** Shared JDBC connection; opened by {@link #open()}, auto-commit off. */
    private Connection connection;
    /** Candidate values inserted and queried at random. */
    private String[] dictionary;

    public MultiRowsMain() {
        System.err.println(this.getClass().getName());
    }

    /** Best-effort removal of a file-based database; a no-op for MySQL. */
    public void deleteDb(String file) {
        FileUtils.deleteQuietly(new File(file));
    }

    /** Opens the benchmark connection with auto-commit disabled so inserts can be batched. */
    public void open() throws SQLException {
        connection = DriverManager.getConnection(
                "jdbc:mysql://127.0.0.1:3306/sakai22?useUnicode=true&characterEncoding=UTF-8",
                "sakai22", "sakai22");
        connection.setAutoCommit(false);
    }

    /**
     * (Re)creates the benchmark table cn_css_index with {@code columns}
     * individually indexed varchar columns v0..v(columns-1).
     */
    public void createTables(int columns) throws SQLException {
        Statement s = connection.createStatement();
        try {
            s.execute("DROP TABLE IF EXISTS cn_css_index");
            StringBuilder sql = new StringBuilder();
            sql.append("CREATE TABLE cn_css_index (");
            sql.append("rid varchar(32) NOT NULL,");
            for (int i = 0; i < columns; i++) {
                sql.append("v").append(i).append(" varchar(780),");
            }
            sql.append("primary key(rid)) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci");
            s.execute(sql.toString());
            for (int i = 0; i < columns; i++) {
                s.execute("CREATE INDEX cn_css_index_v" + i + " ON cn_css_index (v" + i + ")");
            }
        } finally {
            s.close(); // original leaked the statement if any DDL failed
        }
    }

    /** Fills {@link #dictionary} with {@code size} deterministic URL-safe SHA-1 hashes. */
    public void populateDictionary(int size) throws NoSuchAlgorithmException, UnsupportedEncodingException {
        dictionary = new String[size];
        MessageDigest md = MessageDigest.getInstance("SHA1");
        for (int i = 0; i < size; i++) {
            // java.util.Base64 URL-safe without padding matches the output of
            // commons-codec encodeBase64URLSafeString used previously.
            dictionary[i] = Base64.getUrlEncoder().withoutPadding()
                    .encodeToString(md.digest(("Dict" + i).getBytes("UTF-8")));
        }
    }

    /**
     * Appends {@code records} rows (each column randomly present or NULL),
     * committing every 500 rows and printing elapsed time plus a 2-column
     * select probe after each commit.
     */
    public void loadTable(int columns, int records) throws SQLException,
            UnsupportedEncodingException, NoSuchAlgorithmException {
        StringBuilder sb = new StringBuilder();
        sb.append("insert into cn_css_index (rid");
        for (int i = 0; i < columns; i++) {
            sb.append(",v").append(i);
        }
        sb.append(") values ( ?");
        for (int i = 0; i < columns; i++) {
            sb.append(",?");
        }
        sb.append(")");
        PreparedStatement p = connection.prepareStatement(sb.toString());
        Statement s = connection.createStatement();
        ResultSet rs = null;
        try {
            MessageDigest sha1 = MessageDigest.getInstance("SHA1");
            SecureRandom sr = new SecureRandom();
            long cst = System.currentTimeMillis();
            long cs = cst;
            rs = s.executeQuery("select count(*) from cn_css_index");
            rs.next();
            int nrows = rs.getInt(1);
            for (int i = nrows; i < records + nrows; i++) {
                String rid = Base64.getUrlEncoder().withoutPadding()
                        .encodeToString(sha1.digest(("TEST" + i).getBytes("UTF-8")));
                p.clearParameters();
                p.setString(1, rid);
                for (int j = 2; j <= columns + 1; j++) {
                    if (sr.nextBoolean()) {
                        p.setString(j, dictionary[sr.nextInt(dictionary.length)]);
                    } else {
                        p.setNull(j, Types.VARCHAR);
                    }
                }
                p.execute();
                if (i % 500 == 0) {
                    connection.commit();
                    long ct = System.currentTimeMillis();
                    System.err.print("" + i + "," + (ct - cs) + ",");
                    testSelect(2, 0, columns, 5000, true);
                    cs = System.currentTimeMillis();
                }
            }
            // Commit the tail batch; the original only committed on i%500==0,
            // losing up to 499 rows when the connection closed.
            connection.commit();
            long ctt = System.currentTimeMillis();
            // ms per row; the original printed rows-per-ms while labelling it time-per-row.
            System.err.println("Commit " + records + " " + (ctt - cst)
                    + " ms average time per row to insert "
                    + (((double) ctt - (double) cst) / (double) records));
        } finally {
            if (rs != null) {
                try { rs.close(); } catch (SQLException e) { /* best effort */ }
            }
            try { p.close(); } catch (SQLException e) { /* best effort */ }
            try { s.close(); } catch (SQLException e) { /* best effort */ }
        }
    }

    /** Closes the benchmark connection. */
    private void close() throws SQLException {
        connection.close();
    }

    /**
     * Runs randomized equality selects for up to {@code timeToLive} ms.
     *
     * @param ncols      number of AND-ed equality constraints (distinct random columns)
     * @param sorts      number of order-by columns, 0 for none
     * @param columns    total number of columns to draw from
     * @param timeToLive wall-clock budget in milliseconds
     * @param csv        true for terse "rows,avg-ms" output, false for verbose output with the SQL
     */
    public void testSelect(int ncols, int sorts, int columns, long timeToLive, boolean csv)
            throws SQLException {
        StringBuilder sb = new StringBuilder();
        sb.append("select rid from cn_css_index where ");
        SecureRandom sr = new SecureRandom();
        Set<Integer> used = new LinkedHashSet<Integer>();
        while (used.size() < ncols) {
            used.add(sr.nextInt(columns)); // Set semantics give distinct columns
        }
        Integer[] cnums = used.toArray(new Integer[0]);
        for (int i = 0; i < ncols - 1; i++) {
            sb.append("v").append(cnums[i]).append(" = ? AND ");
        }
        sb.append("v").append(cnums[ncols - 1]).append(" = ? ");
        if (sorts > 0) {
            sb.append(" order by ");
            used.clear();
            while (used.size() < sorts) {
                used.add(sr.nextInt(columns));
            }
            // Was allocated as new Integer[ncols]; sorts is the correct length here.
            cnums = used.toArray(new Integer[0]);
            for (int i = 0; i < sorts - 1; i++) {
                sb.append("v").append(cnums[i]).append(",");
            }
            sb.append("v").append(cnums[sorts - 1]);
        }
        if (!csv) {
            System.err.println(sb.toString());
        }
        PreparedStatement p = connection.prepareStatement(sb.toString());
        long atstart = System.currentTimeMillis();
        long endTestTime = atstart + timeToLive;
        int nq = 0;
        int arows = 0;
        try {
            while (System.currentTimeMillis() < endTestTime) {
                p.clearParameters();
                for (int i = 1; i <= ncols; i++) {
                    p.setString(i, dictionary[sr.nextInt(dictionary.length)]);
                }
                ResultSet rs = p.executeQuery();
                try {
                    while (rs.next()) {
                        arows++;
                    }
                } finally {
                    rs.close();
                }
                nq++;
            }
        } finally {
            p.close(); // original leaked the statement on exception
        }
        // Guard against nq == 0 (budget exhausted before the first query finished);
        // the original's integer division would have thrown ArithmeticException.
        int q = Math.max(nq, 1);
        double t = System.currentTimeMillis() - atstart;
        double a = t / q;
        if (csv) {
            System.err.println("" + (arows / q) + "," + a);
        } else {
            System.err.println("Found " + arows + " in " + t + "ms executed " + nq + " queries");
            System.err.println("Average " + (arows / q) + " in " + a + "ms");
        }
    }

    public static void main(String[] argv) throws SQLException, NoSuchAlgorithmException,
            UnsupportedEncodingException {
        MultiRowsMain tmr = new MultiRowsMain();
        tmr.open();
        tmr.createTables(25);
        tmr.populateDictionary(20);
        tmr.loadTable(25, 10000);
        // Same sequence as before: for each sort count 0..2, probe 1..5 constraint columns.
        for (int sorts = 0; sorts <= 2; sorts++) {
            for (int ncols = 1; ncols <= 5; ncols++) {
                tmr.testSelect(ncols, sorts, 25, 5000);
            }
        }
        tmr.close();
    }

    /** Convenience overload: verbose (non-CSV) variant of testSelect. */
    private void testSelect(int i, int j, int k, int l) throws SQLException {
        testSelect(i, j, k, l, false);
    }
}
+ */ +package org.sakaiproject.nakamura.lite.jdbc.mysql; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.DummyStorageCacheManager; +import org.sakaiproject.nakamura.lite.storage.jdbc.BaseJDBCStorageClientPool; + +public class MysqlSetup { + + private static BaseJDBCStorageClientPool clientPool = null; + + public synchronized static BaseJDBCStorageClientPool createClientPool(Configuration configuration) { + try { + BaseJDBCStorageClientPool connectionPool = new BaseJDBCStorageClientPool(); + connectionPool.storageManagerCache = new DummyStorageCacheManager(); + Builder b = ImmutableMap.builder(); + b.put(BaseJDBCStorageClientPool.CONNECTION_URL,"jdbc:mysql://127.0.0.1:3306/sakai22?useUnicode=true&characterEncoding=UTF-8"); + b.put(BaseJDBCStorageClientPool.JDBC_DRIVER, "com.mysql.jdbc.Driver"); + b.put("username", "sakai22"); + b.put("password", "sakai22"); + b.put("store-base-dir", "target/store"); + b.put(Configuration.class.getName(), configuration); + connectionPool + .activate(b.build()); + return connectionPool; + } catch (ClassNotFoundException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + public synchronized static BaseJDBCStorageClientPool getClientPool(Configuration configuration) { + if ( clientPool == null) { + clientPool = createClientPool(configuration); + } + return clientPool; + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/AccessControlManagerImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/AccessControlManagerImplTest.java new file mode 100644 index 00000000..2846638a --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/AccessControlManagerImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license 
agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.oracle; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.accesscontrol.AbstractAccessControlManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class AccessControlManagerImplTest extends AbstractAccessControlManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return OracleSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/AuthorizableManagerImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/AuthorizableManagerImplTest.java new file mode 100644 index 00000000..919c4237 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/AuthorizableManagerImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.oracle; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.authorizable.AbstractAuthorizableManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class AuthorizableManagerImplTest extends AbstractAuthorizableManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return OracleSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/ContentManagerFinderImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/ContentManagerFinderImplTest.java new file mode 100644 index 00000000..50ba6767 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/ContentManagerFinderImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.oracle; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerFinderTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class ContentManagerFinderImplTest extends AbstractContentManagerFinderTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return OracleSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/ContentManagerImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/ContentManagerImplTest.java new file mode 100644 index 00000000..cd31f48a --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/ContentManagerImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.oracle; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class ContentManagerImplTest extends AbstractContentManagerTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return OracleSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/LockManagerImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/LockManagerImplTest.java new file mode 100644 index 00000000..e5764851 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/LockManagerImplTest.java @@ -0,0 +1,14 @@ +package org.sakaiproject.nakamura.lite.jdbc.oracle; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.lock.AbstractLockManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class LockManagerImplTest extends AbstractLockManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return OracleSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/OracleSetup.java 
b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/OracleSetup.java new file mode 100644 index 00000000..5d791e61 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/oracle/OracleSetup.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.jdbc.oracle; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.DummyStorageCacheManager; +import org.sakaiproject.nakamura.lite.storage.jdbc.BaseJDBCStorageClientPool; + +public class OracleSetup { + + private static BaseJDBCStorageClientPool clientPool = null; + + public synchronized static BaseJDBCStorageClientPool createClientPool(Configuration configuration) { + try { + BaseJDBCStorageClientPool connectionPool = new BaseJDBCStorageClientPool(); + connectionPool.storageManagerCache = new DummyStorageCacheManager(); + Builder b = ImmutableMap.builder(); + b.put(BaseJDBCStorageClientPool.CONNECTION_URL,"jdbc:oracle:thin:@172.16.41.128:1521:XE"); + b.put(BaseJDBCStorageClientPool.JDBC_DRIVER, "oracle.jdbc.driver.OracleDriver"); + b.put("username", "sakai22"); + b.put("password", "sakai22"); + b.put("store-base-dir", "target/store"); + b.put(Configuration.class.getName(), configuration); + connectionPool + .activate(b.build()); + return connectionPool; + } catch (ClassNotFoundException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + public synchronized static BaseJDBCStorageClientPool getClientPool(Configuration configuration) { + if ( clientPool == null) { + clientPool = createClientPool(configuration); + } + return clientPool; + } +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/AccessControlManagerImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/AccessControlManagerImplTest.java new file mode 100644 index 00000000..3239985e --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/AccessControlManagerImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.postgresql; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.accesscontrol.AbstractAccessControlManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class AccessControlManagerImplTest extends AbstractAccessControlManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return PostgreSQLSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/AuthorizableManagerImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/AuthorizableManagerImplTest.java new file mode 100644 index 00000000..a45552ec --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/AuthorizableManagerImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.postgresql; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.authorizable.AbstractAuthorizableManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class AuthorizableManagerImplTest extends AbstractAuthorizableManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return PostgreSQLSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/ContentManagerFinderImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/ContentManagerFinderImplTest.java new file mode 100644 index 00000000..334d4f65 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/ContentManagerFinderImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.postgresql; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerFinderTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class ContentManagerFinderImplTest extends AbstractContentManagerFinderTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return PostgreSQLSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/ContentManagerImplTest.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/ContentManagerImplTest.java new file mode 100644 index 00000000..8571a2ad --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/ContentManagerImplTest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.jdbc.postgresql; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class ContentManagerImplTest extends AbstractContentManagerTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return PostgreSQLSetup.getClientPool(configuration); + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/KeyValueRowsMain.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/KeyValueRowsMain.java new file mode 100644 index 00000000..2334ffa3 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/KeyValueRowsMain.java @@ -0,0 +1,277 @@ +package org.sakaiproject.nakamura.lite.jdbc.postgresql; + +import java.io.File; +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.io.FileUtils; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +public class 
package org.sakaiproject.nakamura.lite.jdbc.postgresql;

import java.io.File;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Base64;
import java.util.LinkedHashSet;
import java.util.Set;

import org.apache.commons.io.FileUtils;

/**
 * Ad-hoc benchmark of the key/value schema (one row per property, joined back
 * together at query time) against a local PostgreSQL instance. Not a unit
 * test: run via {@link #main(String[])} with PostgreSQL serving database "nak"
 * for user nakamura/nakamura on localhost.
 */
public class KeyValueRowsMain {

    /** Shared JDBC connection; opened by {@link #open()}, auto-commit off. */
    private Connection connection;
    /** Candidate values inserted and queried at random. */
    private String[] dictionary;

    public KeyValueRowsMain() {
        System.err.println(this.getClass().getName());
    }

    /** Best-effort removal of a file-based database; a no-op for PostgreSQL. */
    public void deleteDb(String file) {
        FileUtils.deleteQuietly(new File(file));
    }

    /** Opens the benchmark connection with auto-commit disabled so inserts can be batched. */
    public void open() throws SQLException {
        connection = DriverManager.getConnection("jdbc:postgresql://localhost/nak", "nakamura", "nakamura");
        connection.setAutoCommit(false);
    }

    /**
     * (Re)creates the key/value table cn_css_kv (row id, column id, value)
     * plus its row-key and value-lookup indexes. {@code columns} is unused by
     * the schema itself — every property lands in the same three columns.
     */
    public void createTables(int columns) throws SQLException {
        Statement s = connection.createStatement();
        try {
            s.execute("DROP TABLE IF EXISTS cn_css_kv cascade");
            StringBuilder sql = new StringBuilder();
            sql.append("CREATE TABLE cn_css_kv (");
            sql.append("id serial,");
            sql.append("rid varchar(32) NOT NULL,");
            sql.append("cid varchar(64) NOT NULL,");
            sql.append("v varchar(780),");
            sql.append("constraint cn_css_kv_pk primary key(id)) ");
            s.execute(sql.toString());
            s.execute("CREATE INDEX cn_css_kv_rowkey ON cn_css_kv (rid,cid)");
            s.execute("CREATE INDEX cn_css_kv_locate ON cn_css_kv (v,cid)");
        } finally {
            s.close(); // original leaked the statement if any DDL failed
        }
    }

    /** Fills {@link #dictionary} with {@code size} deterministic URL-safe SHA-1 hashes. */
    public void populateDictionary(int size) throws NoSuchAlgorithmException,
            UnsupportedEncodingException {
        dictionary = new String[size];
        MessageDigest md = MessageDigest.getInstance("SHA1");
        for (int i = 0; i < size; i++) {
            // java.util.Base64 URL-safe without padding matches the output of
            // commons-codec encodeBase64URLSafeString used previously.
            dictionary[i] = Base64.getUrlEncoder().withoutPadding()
                    .encodeToString(md.digest(("Dict" + i).getBytes("UTF-8")));
        }
    }

    /**
     * Appends {@code records} logical rows; each of the {@code columns}
     * properties is present with probability 0.5 and stored as its own
     * (rid, "v"+j, value) row. Commits every 500 logical rows and prints
     * timing plus a 2-column select probe after each commit.
     */
    public void loadTable(int columns, int records) throws SQLException,
            UnsupportedEncodingException, NoSuchAlgorithmException {
        PreparedStatement p = null;
        Statement s = null;
        ResultSet rs = null;
        try {
            p = connection.prepareStatement("insert into cn_css_kv (rid, cid, v) values ( ?, ?, ?)");
            s = connection.createStatement();
            MessageDigest sha1 = MessageDigest.getInstance("SHA1");
            SecureRandom sr = new SecureRandom();
            long cs = System.currentTimeMillis();
            rs = s.executeQuery("select count(*) from cn_css_kv");
            rs.next();
            int nrows = rs.getInt(1);
            for (int i = nrows; i < records + nrows; i++) {
                String rid = Base64.getUrlEncoder().withoutPadding()
                        .encodeToString(sha1.digest(("TEST" + i).getBytes("UTF-8")));
                for (int j = 0; j < columns; j++) {
                    if (sr.nextBoolean()) {
                        p.clearParameters();
                        p.setString(1, rid);
                        p.setString(2, "v" + j);
                        p.setString(3, dictionary[sr.nextInt(dictionary.length)]);
                        p.execute();
                    }
                }
                if (i % 500 == 0) {
                    connection.commit();
                    long ct = System.currentTimeMillis();
                    System.err.print("" + i + "," + (ct - cs) + ",");
                    testSelect(2, 0, columns, 5000, true);
                    cs = System.currentTimeMillis();
                }
            }
            // Commit the tail batch; the original only committed on i%500==0,
            // losing up to 499 logical rows when the connection closed.
            connection.commit();
        } finally {
            if (rs != null) {
                try { rs.close(); } catch (SQLException e) { /* best effort */ }
            }
            if (p != null) {
                try { p.close(); } catch (SQLException e) { /* best effort */ }
            }
            if (s != null) {
                try { s.close(); } catch (SQLException e) { /* best effort */ }
            }
        }
    }

    /** Closes the benchmark connection. */
    private void close() throws SQLException {
        connection.close();
    }

    /**
     * Runs randomized self-join selects for up to {@code timeToLive} ms.
     *
     * @param ncols      number of AND-ed equality constraints (distinct random columns)
     * @param sorts      number of order-by columns, 0 for none
     * @param columns    total number of logical columns to draw from
     * @param timeToLive wall-clock budget in milliseconds
     * @param csv        true for terse "rows,avg-ms" output, false for verbose output with the SQL
     */
    public void testSelect(int ncols, int sorts, int columns, long timeToLive, boolean csv)
            throws SQLException {
        StringBuilder sb = new StringBuilder();
        SecureRandom sr = new SecureRandom();
        Set<Integer> used = new LinkedHashSet<Integer>();
        while (used.size() < ncols) {
            used.add(sr.nextInt(columns)); // Set semantics give distinct columns
        }
        Integer[] cnums = used.toArray(new Integer[0]);
        sb.append("select distinct a.rid ");
        for (int i = 0; i < sorts; i++) {
            sb.append(", s").append(i).append(".v ");
        }
        sb.append(" from cn_css_kv a ");
        for (int i = 0; i < ncols; i++) {
            sb.append(" , cn_css_kv a").append(i);
        }
        for (int i = 0; i < sorts; i++) {
            sb.append(" , cn_css_kv s").append(i);
        }
        sb.append(" where ");
        for (int i = 0; i < ncols; i++) {
            sb.append(" a").append(i).append(".cid = ? AND a").append(i).append(".v = ? AND a")
                    .append(i).append(".rid = a.rid AND ");
        }
        for (int i = 0; i < sorts; i++) {
            sb.append("s").append(i).append(".cid = ? AND a.rid = s").append(i).append(".rid AND ");
        }
        sb.append(" 1 = 1 ");
        Integer[] snums = null;
        if (sorts > 0) {
            sb.append(" order by ");
            used.clear();
            while (used.size() < sorts) {
                used.add(sr.nextInt(columns));
            }
            // Was allocated as new Integer[ncols]; sorts is the correct length here.
            snums = used.toArray(new Integer[0]);
            for (int i = 0; i < sorts - 1; i++) {
                sb.append("s").append(i).append(".v ,");
            }
            sb.append("s").append(sorts - 1).append(".v ");
        }
        if (!csv) {
            System.err.println(sb.toString());
        }
        PreparedStatement p = null;
        ResultSet rs = null;
        long atstart = System.currentTimeMillis();
        int arows = 0;
        int nq = 0;
        try {
            p = connection.prepareStatement(sb.toString());
            long endTestTime = atstart + timeToLive;
            while (System.currentTimeMillis() < endTestTime) {
                p.clearParameters();
                for (int i = 0; i < ncols; i++) {
                    p.setString(i * 2 + 1, "v" + cnums[i]);
                    p.setString(i * 2 + 2, dictionary[sr.nextInt(dictionary.length)]);
                }
                for (int i = 0; i < sorts; i++) {
                    // Was "s" + snums[i], but loadTable only ever stores cids of
                    // the form "v"+j — the sort join could never match a row.
                    p.setString(i + 1 + ncols * 2, "v" + snums[i]);
                }
                rs = p.executeQuery();
                while (rs.next()) {
                    arows++;
                }
                nq++;
                rs.close();
            }
        } finally {
            if (rs != null) {
                try { rs.close(); } catch (SQLException e) { /* best effort */ }
            }
            if (p != null) {
                try { p.close(); } catch (SQLException e) { /* best effort */ }
            }
        }
        // Guard against nq == 0 (budget exhausted before the first query finished);
        // the original's integer division would have thrown ArithmeticException.
        int q = Math.max(nq, 1);
        double t = System.currentTimeMillis() - atstart;
        double a = t / q;
        if (csv) {
            System.err.println("" + (arows / q) + "," + a);
        } else {
            System.err.println("Found " + arows + " in " + t + "ms executed " + nq + " queries");
            System.err.println("Average " + (arows / q) + " in " + a + "ms");
        }
    }

    public static void main(String[] argv) throws SQLException, NoSuchAlgorithmException,
            UnsupportedEncodingException {
        KeyValueRowsMain tmr = new KeyValueRowsMain();
        tmr.open();
        tmr.createTables(30);
        tmr.populateDictionary(20);
        tmr.loadTable(30, 10000);
        // Same sequence as before: for each sort count 0..2, probe 1..5 constraint columns.
        for (int sorts = 0; sorts <= 2; sorts++) {
            for (int ncols = 1; ncols <= 5; ncols++) {
                tmr.testSelect(ncols, sorts, 30, 5000);
            }
        }
        tmr.close();
    }

    /** Convenience overload: verbose (non-CSV) variant of testSelect. */
    private void testSelect(int i, int j, int k, int l) throws SQLException {
        testSelect(i, j, k, l, false);
    }
}
a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/MultiRowsMain.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/MultiRowsMain.java new file mode 100644 index 00000000..4cca6768 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/MultiRowsMain.java @@ -0,0 +1,217 @@ +package org.sakaiproject.nakamura.lite.jdbc.postgresql; + +import java.io.File; +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.io.FileUtils; + +import edu.umd.cs.findbugs.annotations.SuppressWarnings; + +public class MultiRowsMain { + + + private Connection connection; + private String[] dictionary; + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public MultiRowsMain() { + System.err.println(this.getClass().getName()); + } + + public void deleteDb(String file) { + FileUtils.deleteQuietly(new File(file)); + } + + public void open() throws SQLException { + connection = DriverManager + .getConnection("jdbc:postgresql://localhost/nak", "nakamura", "nakamura"); + connection.setAutoCommit(false); + } + + public void createTables(int columns) throws SQLException { + Statement s = connection.createStatement(); + s.execute("DROP table if exists cn_css_index cascade"); + StringBuilder sql = new StringBuilder(); + sql.append("CREATE TABLE cn_css_index ("); + sql.append("rid varchar(32) NOT NULL,"); + for ( int i = 0; i < columns; i++ ) { + sql.append("v").append(i).append(" varchar(780),"); + } + 
sql.append("constraint cn_css_index_pk primary key(rid))"); + s.execute(sql.toString()); + for ( int i = 0; i < columns; i++) { + s.execute("CREATE INDEX cn_css_index_v"+i+" ON cn_css_index (v"+i+")"); + } + s.close(); + } + + + + public void populateDictionary(int size) throws NoSuchAlgorithmException, UnsupportedEncodingException { + dictionary = new String[size]; + MessageDigest md = MessageDigest.getInstance("SHA1"); + for ( int i = 0; i < size; i++) { + dictionary[i] = Base64.encodeBase64URLSafeString(md.digest(String.valueOf("Dict"+i).getBytes("UTF-8"))); + } + } + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public void loadTable(int columns, int records) throws SQLException, UnsupportedEncodingException, NoSuchAlgorithmException { + StringBuilder sb = new StringBuilder(); + sb.append("insert into cn_css_index (rid"); + for ( int i = 0; i < columns; i++ ) { + sb.append(",v").append(i); + } + sb.append(") values ( ?"); + for ( int i = 0; i < columns; i++ ) { + sb.append(",?"); + } + sb.append(")"); + PreparedStatement p = connection.prepareStatement(sb.toString()); + Statement s = connection.createStatement(); + MessageDigest sha1 = MessageDigest.getInstance("SHA1"); + SecureRandom sr = new SecureRandom(); + long cst = System.currentTimeMillis(); + long cs = System.currentTimeMillis(); + ResultSet rs = s.executeQuery("select count(*) from cn_css_index"); + rs.next(); + int nrows = rs.getInt(1); + for ( int i = 0+nrows; i < records+nrows; i++) { + String rid = Base64.encodeBase64URLSafeString(sha1.digest(String.valueOf("TEST"+i).getBytes("UTF-8"))); + p.clearParameters(); + p.setString(1, rid); + for ( int j = 2; j <= columns+1; j++) { + if ( sr.nextBoolean() ) { + p.setString(j, dictionary[sr.nextInt(dictionary.length)]); + } else { + p.setNull(j, Types.VARCHAR); + } + } + p.execute(); + + if ( i%500 == 0) { + connection.commit(); + long ct = System.currentTimeMillis(); + System.err.print(""+i+","+(ct-cs)+","); + 
testSelect(2, 0, columns, 5000, true); + cs = System.currentTimeMillis(); + } + } + long ctt = System.currentTimeMillis(); + System.err.println("Commit "+records+" "+(ctt-cst)+" ms average time per row to insert "+((double)records/((double)ctt-(double)cst))); + p.close(); + s.close(); + } + private void close() throws SQLException { + connection.close(); + } + + @SuppressWarnings(value="NP_ALWAYS_NULL", justification="How can System.err be null ?") + public void testSelect(int ncols, int sorts, int columns, long timeToLive, boolean csv) throws SQLException { + StringBuilder sb = new StringBuilder(); + sb.append("select rid from cn_css_index where "); + SecureRandom sr = new SecureRandom(); + Set used = new LinkedHashSet(); + while(used.size() < ncols ) { + int c = sr.nextInt(columns); + if ( !used.contains(c) ) { + used.add(c); + } + } + Integer[] cnums = used.toArray(new Integer[ncols]); + for ( int i = 0; i < ncols-1; i++ ) { + + sb.append("v").append(cnums[i]).append(" = ? AND "); + } + sb.append("v").append(cnums[ncols-1]).append(" = ? 
"); + if ( sorts > 0 ) { + sb.append(" order by "); + used.clear(); + while(used.size() < sorts ) { + int c = sr.nextInt(columns); + if ( !used.contains(c) ) { + used.add(c); + } + } + cnums = used.toArray(new Integer[ncols]); + for ( int i = 0; i < sorts-1; i++ ) { + sb.append("v").append(cnums[i]).append(","); + } + sb.append("v").append(cnums[sorts-1]); + } + if ( !csv) { + System.err.println(sb.toString()); + } + PreparedStatement p = connection.prepareStatement(sb.toString()); + long atstart = System.currentTimeMillis(); + long endTestTime = atstart+timeToLive; + int nq = 0; + int arows = 0; + while(System.currentTimeMillis() < endTestTime) { + p.clearParameters(); + for ( int i = 1; i <= ncols; i++ ) { + p.setString(i, dictionary[sr.nextInt(dictionary.length)]); + } + ResultSet rs = p.executeQuery(); + int rows = 0; + while(rs.next()) { + rows++; + } + arows += rows; + nq++; + rs.close(); + } + double t = System.currentTimeMillis()-atstart; + double a = t/nq; + if ( csv ) { + System.err.println("" + (arows / nq) + "," + a ); + } else { + System.err.println("Found " + arows + " in " + t + "ms executed " + nq + " queries"); + System.err.println("Average " + (arows / nq) + " in " + a + "ms"); + } + p.close(); + + } + + public static void main(String[] argv) throws SQLException, NoSuchAlgorithmException, UnsupportedEncodingException { + MultiRowsMain tmr = new MultiRowsMain(); + tmr.open(); + tmr.createTables(30); + tmr.populateDictionary(20); + tmr.loadTable(30, 10000); + tmr.testSelect(1, 0, 30, 5000); + tmr.testSelect(2, 0, 30, 5000); + tmr.testSelect(3, 0, 30, 5000); + tmr.testSelect(4, 0, 30, 5000); + tmr.testSelect(5, 0, 30, 5000); + tmr.testSelect(1, 1, 30, 5000); + tmr.testSelect(2, 1, 30, 5000); + tmr.testSelect(3, 1, 30, 5000); + tmr.testSelect(4, 1, 30, 5000); + tmr.testSelect(5, 1, 30, 5000); + tmr.testSelect(1, 2, 30, 5000); + tmr.testSelect(2, 2, 30, 5000); + tmr.testSelect(3, 2, 30, 5000); + tmr.testSelect(4, 2, 30, 5000); + tmr.testSelect(5, 2, 
30, 5000); + tmr.close(); + } + private void testSelect(int i, int j, int k, int l) throws SQLException { + testSelect(i, j, k, l, false); + } + + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/PostgreSQLSetup.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/PostgreSQLSetup.java new file mode 100644 index 00000000..342ba896 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/jdbc/postgresql/PostgreSQLSetup.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package org.sakaiproject.nakamura.lite.jdbc.postgresql; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.DummyStorageCacheManager; +import org.sakaiproject.nakamura.lite.storage.jdbc.BaseJDBCStorageClientPool; + +public class PostgreSQLSetup { + + private static BaseJDBCStorageClientPool clientPool = null; + + public synchronized static BaseJDBCStorageClientPool createClientPool(Configuration configuration) { + try { + BaseJDBCStorageClientPool connectionPool = new BaseJDBCStorageClientPool(); + connectionPool.storageManagerCache = new DummyStorageCacheManager(); + Builder b = ImmutableMap.builder(); + b.put(BaseJDBCStorageClientPool.CONNECTION_URL,"jdbc:postgresql://localhost/nak"); + b.put(BaseJDBCStorageClientPool.JDBC_DRIVER, "org.postgresql.Driver"); + b.put("username", "nakamura"); + b.put("password", "nakamura"); + b.put("store-base-dir", "target/store"); + b.put(Configuration.class.getName(), configuration); + connectionPool + .activate(b.build()); + return connectionPool; + } catch (ClassNotFoundException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + public synchronized static BaseJDBCStorageClientPool getClientPool(Configuration configuration) { + if ( clientPool == null) { + clientPool = createClientPool(configuration); + } + return clientPool; + } + +} diff --git a/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/ContentCreateSoak.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/ContentCreateSoak.java new file mode 100644 index 00000000..f8b12359 --- /dev/null +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/ContentCreateSoak.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.soak.mysql; + +import java.io.IOException; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; +import org.sakaiproject.nakamura.lite.jdbc.mysql.MysqlSetup; +import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; +import org.sakaiproject.nakamura.lite.soak.content.ContentCreateClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +import com.google.common.collect.Maps; + +public class ContentCreateSoak extends AbstractSoakController { + + private int totalContent; + private StorageClientPool connectionPool; + private Configuration configuration; + private Map contentMap; + + public ContentCreateSoak(int totalContent, + StorageClientPool connectionPool, Configuration configuration, Map cm) { + super(totalContent); + this.configuration = configuration; + this.connectionPool = connectionPool; + this.totalContent = totalContent; + this.contentMap 
= cm; + } + + protected Runnable getRunnable(int nthreads) throws ClientPoolException, + StorageClientException, AccessDeniedException { + int contentPerThread = totalContent / nthreads; + return new ContentCreateClient(contentPerThread, + connectionPool, configuration, contentMap); + } + + public static void main(String[] argv) throws ClientPoolException, StorageClientException, + AccessDeniedException, ClassNotFoundException, IOException { + + int totalContent = 100000; + int nthreads = 1; + + if (argv.length > 0) { + nthreads = StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads); + } + if (argv.length > 1) { + totalContent = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalContent); + } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map cm = Maps.newHashMap(); + cm.put("sling:resourceType","test/resourcetype"); + cm.put("sakai:pooled-content-manager",new String[]{"a","b"}); + cm.put("sakai:type","sdfsdaggdsfgsdgsd"); + cm.put("sakai:marker","marker-marker-marker"); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); + + ContentCreateSoak contentCreateSoak = new ContentCreateSoak( + totalContent, MysqlSetup.getClientPool(configuration), configuration, cm); + contentCreateSoak.launchSoak(nthreads); + + + } + + + +} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsSoak.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsSoak.java similarity index 71% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsSoak.java rename to drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsSoak.java index 3b2ebe0f..b46ea234 100644 --- 
a/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsSoak.java +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsSoak.java @@ -17,22 +17,31 @@ */ package org.sakaiproject.nakamura.lite.soak.mysql; +import com.google.common.collect.Maps; + import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; import org.sakaiproject.nakamura.lite.jdbc.mysql.MysqlSetup; import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +import java.io.IOException; +import java.util.Map; public class CreateUsersAndGroupsSoak extends AbstractSoakController { private int totalUsers; private StorageClientPool connectionPool; + private Configuration configuration; - public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool) { + public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool, Configuration configuration) { super(totalUsers); + this.configuration = configuration; this.connectionPool = connectionPool; this.totalUsers = totalUsers; } @@ -40,11 +49,11 @@ public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool connectionPool protected Runnable getRunnable(int nthreads) throws ClientPoolException, StorageClientException, AccessDeniedException { int usersPerThread = totalUsers / nthreads; - return new CreateUsersAndGroupsClient(usersPerThread, connectionPool); + return new CreateUsersAndGroupsClient(usersPerThread, 
connectionPool, configuration); } public static void main(String[] argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { int totalUsers = 1000; int nthreads = 10; @@ -55,9 +64,16 @@ public static void main(String[] argv) throws ClientPoolException, StorageClient if (argv.length > 1) { totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers); } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); CreateUsersAndGroupsSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsSoak( - totalUsers, MysqlSetup.getClientPool()); + totalUsers, MysqlSetup.getClientPool(configuration), configuration); createUsersAndGroupsSoak.launchSoak(nthreads); } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsWithMembersSoak.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsWithMembersSoak.java similarity index 75% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsWithMembersSoak.java rename to drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsWithMembersSoak.java index caf671b8..cc146ded 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsWithMembersSoak.java +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/CreateUsersAndGroupsWithMembersSoak.java @@ -17,24 +17,33 @@ */ package org.sakaiproject.nakamura.lite.soak.mysql; +import com.google.common.collect.Maps; + import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import 
org.sakaiproject.nakamura.api.lite.Configuration; import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.StorageClientUtils; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.lite.ConfigurationImpl; import org.sakaiproject.nakamura.lite.jdbc.mysql.MysqlSetup; import org.sakaiproject.nakamura.lite.soak.AbstractSoakController; import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsWithMembersClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +import java.io.IOException; +import java.util.Map; public class CreateUsersAndGroupsWithMembersSoak extends AbstractSoakController { private int totalUsers; private StorageClientPool connectionPool; private int totalGroups; + private Configuration configuration; public CreateUsersAndGroupsWithMembersSoak(int totalUsers, int totalGroups, - StorageClientPool connectionPool) { + StorageClientPool connectionPool, Configuration configuration) { super(totalUsers); + this.configuration = configuration; this.connectionPool = connectionPool; this.totalUsers = totalUsers; this.totalGroups = totalGroups; @@ -45,11 +54,11 @@ protected Runnable getRunnable(int nthreads) throws ClientPoolException, int usersPerThread = totalUsers / nthreads; int groupsPerThread = totalGroups / nthreads; return new CreateUsersAndGroupsWithMembersClient(usersPerThread, groupsPerThread, - connectionPool); + connectionPool, configuration); } public static void main(String[] argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { int totalUsers = 1000; int totalGroups = 1000; @@ -64,9 +73,16 @@ public static void main(String[] argv) throws ClientPoolException, StorageClient if (argv.length > 2) { totalGroups = 
StorageClientUtils.getSetting(Integer.valueOf(argv[2]), totalUsers); } + ConfigurationImpl configuration = new ConfigurationImpl(); + Map properties = Maps.newHashMap(); + properties.put("keyspace", "n"); + properties.put("acl-column-family", "ac"); + properties.put("authorizable-column-family", "au"); + properties.put("content-column-family", "cn"); + configuration.activate(properties); CreateUsersAndGroupsWithMembersSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsWithMembersSoak( - totalUsers, totalGroups, MysqlSetup.getClientPool()); + totalUsers, totalGroups, MysqlSetup.getClientPool(configuration), configuration); createUsersAndGroupsSoak.launchSoak(nthreads); } diff --git a/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/SoakAll.java b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/SoakAll.java similarity index 92% rename from src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/SoakAll.java rename to drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/SoakAll.java index 58480d0a..00a020d0 100644 --- a/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/SoakAll.java +++ b/drivers/jdbc/src/test/java/org/sakaiproject/nakamura/lite/soak/mysql/SoakAll.java @@ -21,10 +21,12 @@ import org.sakaiproject.nakamura.api.lite.StorageClientException; import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import java.io.IOException; + public class SoakAll { public static void main(String[] argv) throws ClientPoolException, StorageClientException, - AccessDeniedException, ClassNotFoundException { + AccessDeniedException, ClassNotFoundException, IOException { CreateUsersAndGroupsSoak.main(argv); CreateUsersAndGroupsWithMembersSoak.main(argv); } diff --git a/drivers/mongo/pom.xml b/drivers/mongo/pom.xml new file mode 100644 index 00000000..b95b94ee --- /dev/null +++ b/drivers/mongo/pom.xml @@ -0,0 +1,91 @@ + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + 
org.sakaiproject.nakamura + org.sakaiproject.nakamura.mongo-driver + bundle + 0.1-SNAPSHOT + Sparse Map :: MongoDB SPI Implementation + Storage SPI implementation using MongoDB + + scm:git:git://github.com/sakaiproject/sparsemapcontent.git + scm:git:git@github.com:sakaiproject/sparsemapcontent.git + http://github.com/sakaiproject/sparsemapcontent/ + + + UTF-8 + + + + + org.apache.felix + maven-bundle-plugin + true + + + driver + !* + !* + + org.sakaiproject.nakamura.core + org.sakaiproject.nakamura.lite.storage.mongo.* + + + + + + + + org.mongodb + mongo-java-driver + 2.7.2 + + + + org.apache.felix + org.apache.felix.scr.annotations + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + tests + 1.5.1-SNAPSHOT + test + + + org.slf4j + slf4j-api + 1.5.10 + + + org.slf4j + slf4j-simple + 1.5.10 + test + + + junit + junit + 4.4 + test + + + findbugs + annotations + 1.0.0 + provided + + + + diff --git a/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/GridFSContentHelper.java b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/GridFSContentHelper.java new file mode 100644 index 00000000..de111d4a --- /dev/null +++ b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/GridFSContentHelper.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.storage.mongo; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Calendar; +import java.util.GregorianCalendar; +import java.util.Map; + +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.lite.storage.spi.RowHasher; +import org.sakaiproject.nakamura.lite.storage.spi.content.StreamedContentHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.Maps; +import com.mongodb.DB; +import com.mongodb.gridfs.GridFS; +import com.mongodb.gridfs.GridFSDBFile; +import com.mongodb.gridfs.GridFSInputFile; + +/** + * Store content bodies in GridFS. + * http://www.mongodb.org/display/DOCS/GridFS + * + * This file started as a copy of org.sakaiproject.nakamura.lite.content.FileStreamContentHelper + * in case you didn't notice. It took remarkably little changing to turn it into a GridFS helper. 
+ */ +public class GridFSContentHelper implements StreamedContentHelper { + + private static final Logger LOGGER = LoggerFactory.getLogger(GridFSContentHelper.class); + + private static final String STORE_LOCATION_FIELD = Repository.SYSTEM_PROP_PREFIX + "bodyLocation"; + + private GridFS contentBodies; + + private RowHasher rowHasher; + + public GridFSContentHelper(DB mongodb, RowHasher rowHasher, Map properties) { + this.rowHasher = rowHasher; + + String bucket = StorageClientUtils.getSetting(properties.get(MongoClientPool.PROP_BUCKET), GridFS.DEFAULT_BUCKET); + this.contentBodies = new GridFS(mongodb, bucket); + } + + public InputStream readBody(String keySpace, String columnFamily, String contentBlockId, String streamId, + Map content) throws IOException { + // give me whatever is stored in the property _bodyLocation/streamId + String path = (String) content.get(StorageClientUtils.getAltField(STORE_LOCATION_FIELD, streamId)); + LOGGER.debug("Reading from {} as body of {}:{}:{} ", new Object[] { path, keySpace, columnFamily, contentBlockId }); + GridFSDBFile file = contentBodies.findOne(path); + if ( file != null ) { + return file.getInputStream(); + } else { + return null; + } + } + + public Map writeBody(String keySpace, String columnFamily, String contentId, + String contentBlockId, String streamId, Map content, InputStream in) throws IOException, + StorageClientException { + String path = getPath(keySpace, columnFamily, contentBlockId); + GridFSInputFile file = contentBodies.createFile(in, path); + file.save(); + LOGGER.debug("Wrote {} bytes to {} as body of {}:{}:{} stream {} ", new Object[] { file.getLength(), path, + keySpace, columnFamily, contentBlockId, streamId }); + Map metadata = Maps.newHashMap(); + metadata.put(StorageClientUtils.getAltField(Content.LENGTH_FIELD, streamId), file.getLength()); + metadata.put(StorageClientUtils.getAltField(Content.BLOCKID_FIELD, streamId), contentBlockId); + 
metadata.put(StorageClientUtils.getAltField(STORE_LOCATION_FIELD, streamId), path); + return metadata; + } + + private String getPath(String keySpace, String columnFamily, String contentBlockId) + throws StorageClientException { + Calendar c = new GregorianCalendar(); + c.setTimeInMillis(System.currentTimeMillis()); + int year = c.get(Calendar.YEAR); + int month = c.get(Calendar.MONTH); + String rowHash = rowHasher.rowHash(keySpace, columnFamily, contentBlockId); + return year + "/" + month + "/" + rowHash.substring(0, 2) + "/" + rowHash.substring(2, 4) + + "/" + rowHash.substring(4, 6) + "/" + rowHash; + } + + public boolean hasStream(Map content, String streamId ) { + String path = (String) content.get(StorageClientUtils.getAltField(STORE_LOCATION_FIELD, streamId)); + GridFSDBFile file = contentBodies.findOne(path); + return file != null; + } +} \ No newline at end of file diff --git a/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoClient.java b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoClient.java new file mode 100644 index 00000000..a1c37677 --- /dev/null +++ b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoClient.java @@ -0,0 +1,424 @@ +package org.sakaiproject.nakamura.lite.storage.mongo; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.StorageConstants; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import 
org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.lite.storage.spi.DirectCacheAccess; +import org.sakaiproject.nakamura.lite.storage.spi.DisposableIterator; +import org.sakaiproject.nakamura.lite.storage.spi.Disposer; +import org.sakaiproject.nakamura.lite.storage.spi.RowHasher; +import org.sakaiproject.nakamura.lite.storage.spi.SparseMapRow; +import org.sakaiproject.nakamura.lite.storage.spi.SparseRow; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientListener; +import org.sakaiproject.nakamura.lite.storage.spi.content.StreamedContentHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.mongodb.BasicDBObject; +import com.mongodb.DB; +import com.mongodb.DBCollection; +import com.mongodb.DBCursor; +import com.mongodb.DBObject; +import com.mongodb.MongoException; + +/** + * + * A {@link StorageClient} for SparseMapContent that uses MongoDB as a backend. + * + * For the most part the concepts and objects in SMC and MongoDB are very similar. + * + * Sparse Map Content => MongoDB + * + * Column Family => Collection + * Content Object => Document + * Content Property => Document Field + * + * Both have one method for saving data. SMC is insert, MongoDB is update. + * We use the MongoDB update with the upsert flag set to true so these methods + * are equivalent and we'll just use the term upsert. + * + * There are some subtle differences to be aware of though. + * + * 1. Both SMC and MongoDB think they own a property on each object named _id. + * + * This is not the case. In reality MongoDB owns it. This driver takes pains to + * rewrite the _id property on upsert to another key. This way Mongo manages + * the _id property in the underlying storage and we don't step on its toes too much. + * + * 2. 
MongoDB treats .'s in field names as nested documents + * + * If you try to store a field on x with the name a.b and value 1 in MongoDB, + * you will actually store: + * x.a = { b : 1 } + * + * This driver changes .'s in field names to some obscure constant string before it + * upserts. When reading data out of Mongo we change it back to . so noone knows our + * little secret. shh. This is a bit of a hack. + * + */ +public class MongoClient implements StorageClient, RowHasher { + + private static final Logger log = LoggerFactory.getLogger(MongoClient.class); + + // This belongs to MongoDB. Never set this field. + public static final String MONGO_INTERNAL_ID_FIELD = "_id"; + + // This primary id as far as SMC is concerned + // unless there iss an entry in alternatKeys for this columnFamily + public static final String MONGO_INTERNAL_SPARSE_UUID_FIELD = Repository.SYSTEM_PROP_PREFIX + "smcid"; + + // Connection to MongoDB + private DB mongodb; + + // SMC may use something other than _id as its key + private Map alternateKeys; + + // Reads and Writes file content to a filesystem + private StreamedContentHelper streamedContentHelper; + + // Throws events for the migration framework + private StorageClientListener storageClientListener; + + private Map props; + + @SuppressWarnings("unchecked") + public MongoClient(DB mongodb, Map props) { + this.mongodb = mongodb; + this.props = props; + + String user = StorageClientUtils.getSetting(props.get(MongoClientPool.PROP_MONGO_USER), null); + String password = StorageClientUtils.getSetting(props.get(MongoClientPool.PROP_MONGO_USER), null); + + if (user != null && password != null && !this.mongodb.isAuthenticated()){ + if (!this.mongodb.authenticate(user, password.toCharArray())){ + throw new MongoException("Unable to authenticate"); + } + } + + this.alternateKeys = (Map)props.get(MongoClientPool.PROP_ALT_KEYS); + this.streamedContentHelper = new GridFSContentHelper(mongodb, this, props); + this.mongodb.requestStart(); + } + + 
public Map get(String keySpace, String columnFamily, + String key) throws StorageClientException { + columnFamily = columnFamily.toLowerCase(); + log.debug("get {}:{}:{}", new Object[]{keySpace, columnFamily, key}); + DBCollection collection = mongodb.getCollection(columnFamily); + + DBObject query = null; + if (alternateKeys.containsKey(columnFamily)) { + String altKey = alternateKeys.get(columnFamily); + query = new BasicDBObject(altKey, key); + } + else { + query = new BasicDBObject(MONGO_INTERNAL_SPARSE_UUID_FIELD, key); + } + DBCursor cursor = collection.find(query); + + // Check the result and return it. + Map result = null; + if (cursor.size() == 1){ + result = MongoUtils.convertDBObjectToMap(cursor.next()); + } + if (result == null){ + result = new HashMap(); + } + return result; + } + + public void insert(String keySpace, String columnFamily, String key, + Map values, boolean probablyNew) + throws StorageClientException { + columnFamily = columnFamily.toLowerCase(); + HashMap mutableValues = new HashMap(values); + + // rewrite _id => MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD + if (mutableValues.containsKey(MongoClient.MONGO_INTERNAL_ID_FIELD)){ + mutableValues.put(MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD, + mutableValues.get(MongoClient.MONGO_INTERNAL_ID_FIELD)); + mutableValues.remove(MongoClient.MONGO_INTERNAL_ID_FIELD); + } + + // Set the parent path hash if this is a piece of content that is not a root (roots are orphans) + if (mutableValues.keySet().contains(Content.PATH_FIELD) && !StorageClientUtils.isRoot(key)) { + mutableValues.put(Content.PARENT_HASH_FIELD, + rowHash(keySpace, columnFamily, StorageClientUtils.getParentObjectPath(key))); + } + + DBCollection collection = mongodb.getCollection(columnFamily); + + // The document to update identified its _smcid or _aclKey + DBObject query = null; + + if (alternateKeys.containsKey(columnFamily)) { + String altKey = alternateKeys.get(columnFamily); + query = new BasicDBObject(altKey, key); + 
mutableValues.put(altKey, key); + } + else { + query = new BasicDBObject(MONGO_INTERNAL_SPARSE_UUID_FIELD, key); + mutableValues.put(MONGO_INTERNAL_SPARSE_UUID_FIELD, key); + } + + // Converts the insert into a bunch of set, unset Mongo operations + DBObject insert = MongoUtils.cleanPropertiesForInsert(mutableValues); + + Map mapBefore = this.get(keySpace, columnFamily, key); + if ( storageClientListener != null ) { + storageClientListener.before(keySpace, columnFamily, key, mapBefore); + } + + // Update or insert a single document. + collection.update(query, insert, true, false); + log.debug("insert {}:{}:{} => {}", new Object[] {keySpace, columnFamily, key, insert.toString()}); + + if ( storageClientListener != null ) { + storageClientListener.after(keySpace, columnFamily, key, mutableValues); + } + } + + public void remove(String keySpace, String columnFamily, String key) + throws StorageClientException { + columnFamily = columnFamily.toLowerCase(); + DBCollection collection = mongodb.getCollection(columnFamily); + + // Soft delete content + if (columnFamily.equals((String)props.get(MongoClientPool.PROP_CONTENT_COLLECTION))){ + insert(keySpace, columnFamily, key, ImmutableMap.of(DELETED_FIELD, (Object)TRUE), false); + } + else { + collection.remove(new BasicDBObject(MONGO_INTERNAL_SPARSE_UUID_FIELD, key)); + } + log.debug("remove {}:{}:{}", new Object[]{keySpace, columnFamily, key}); + } + + public DisposableIterator listAll(String keySpace, + String columnFamily) throws StorageClientException { + columnFamily = columnFamily.toLowerCase(); + DBCollection collection = mongodb.getCollection(columnFamily); + log.debug("listAll {}:{}", new Object[]{keySpace, columnFamily}); + + final DBCursor cursor = collection.find(); + final Iterator itr = cursor.iterator(); + + return new DisposableIterator() { + + public boolean hasNext() { + return itr.hasNext(); + } + + public SparseRow next() { + DBObject next = itr.next(); + return new 
SparseMapRow((String)next.get(MONGO_INTERNAL_SPARSE_UUID_FIELD), + MongoUtils.convertDBObjectToMap(next)); + } + + public void close() { + cursor.close(); + mongodb.requestDone(); + } + public void remove() { } + public void setDisposer(Disposer disposer) { } + }; + } + + public long allCount(String keySpace, String columnFamily) + throws StorageClientException { + columnFamily = columnFamily.toLowerCase(); + log.debug("allCount {}:{}", new Object[]{keySpace, columnFamily}); + DBCollection collection = mongodb.getCollection(columnFamily); + return collection.count(); + } + + public InputStream streamBodyOut(String keySpace, String columnFamily, + String contentId, String contentBlockId, String streamId, + Map content) throws StorageClientException, + AccessDeniedException, IOException { + columnFamily = columnFamily.toLowerCase(); + return streamedContentHelper.readBody(keySpace, columnFamily, contentBlockId, streamId, content); + } + + public Map streamBodyIn(String keySpace, + String columnFamily, String contentId, String contentBlockId, + String streamId, Map content, InputStream in) + throws StorageClientException, AccessDeniedException, IOException { + columnFamily = columnFamily.toLowerCase(); + Map meta = streamedContentHelper.writeBody(keySpace, columnFamily, contentId, contentBlockId, streamId, content, in); + return meta; + } + + + @SuppressWarnings("unchecked") + public DisposableIterator> find(String keySpace, + String columnFamily, Map properties, DirectCacheAccess cachingManager) + throws StorageClientException { + + columnFamily = columnFamily.toLowerCase(); + DBCollection collection = mongodb.getCollection(columnFamily); + BasicDBObject query = new BasicDBObject(); + + for (Entry e : properties.entrySet()){ + Object val = e.getValue(); + String key = MongoUtils.escapeFieldName(e.getKey()); + + if (val instanceof Map){ + // This is how it comes from sparse + // properties = { "orset0" : { "fieldName" : [ "searchVal0", "searchVal1" ] } } + Map 
multiValQueryMap = (Map) val; + String field = multiValQueryMap.keySet().iterator().next(); + List searchValues = (List)multiValQueryMap.get(field); + + // This is what mongo expects + // mongoQuery = { "$or" : [ BasicDBObject("field", "val0"), + // BasicDBObject("field", "val1") ] } + ArrayList mongoQuery = new ArrayList(); + for(String searchVal: searchValues){ + mongoQuery.add(new BasicDBObject(field, searchVal)); + } + + if (key.startsWith("orset")){ + // Remove the original query and add a Mongo OR query. + query.remove(key); + query.put(Operators.OR, mongoQuery); + } + } + else if (val instanceof List){ + // What mongo expects + // { "fieldName" : { "$all" : [ "valueX", "valueY" ] } } + List valList = (List)val; + BasicDBObject mongoSet = new BasicDBObject(); + mongoSet.put(Operators.ALL, valList); + // overwrite the original value of key + query.put(key, mongoSet); + } + else { + query.put(key, val); + } + } + + /* + * Support the custom count queries. + * TODO: A better way to define custom queries dynamically. + * Maybe a list of JSON queries and use mongodo.eval(...)? + */ + String customStatementSet = query.getString(StorageConstants.CUSTOM_STATEMENT_SET); + if (customStatementSet != null && "countestimate".equals(customStatementSet)){ + query.remove(StorageConstants.CUSTOM_STATEMENT_SET); + query.remove(StorageConstants.RAWRESULTS); + final int count = (int)collection.count(query); + + return new DisposableIterator>() { + private boolean hasNext = true; + + // Return true only once. 
+ public boolean hasNext() { + if (hasNext){ + hasNext = false; + return true; + } + return hasNext; + } + + public Map next() { + return ImmutableMap.of("1", (Object)Integer.valueOf(count)); + } + public void remove() { } + public void close() { mongodb.requestDone(); } + public void setDisposer(Disposer disposer) { } + }; + } + else { + // See if we need to sort + final DBCursor cursor = collection.find(query); + if (properties.containsKey(StorageConstants.SORT)){ + query.remove(StorageConstants.SORT); + cursor.sort(new BasicDBObject((String)properties.get(StorageConstants.SORT), 1)); + } + final Iterator itr = cursor.iterator(); + + // Iterator with the results. + return new DisposableIterator>() { + public boolean hasNext() { + return itr.hasNext(); + } + public Map next() { + return MongoUtils.convertDBObjectToMap(itr.next()); + } + public void close() { + cursor.close(); + mongodb.requestDone(); + } + public void remove() { } + public void setDisposer(Disposer disposer) { } + }; + } + } + + public void close() { + log.debug("Closed"); + this.mongodb.requestDone(); + } + + public DisposableIterator> listChildren( + String keySpace, String columnFamily, String key, DirectCacheAccess cachingManager) + throws StorageClientException { + columnFamily = columnFamily.toLowerCase(); + // Hash the object we're considering + String hash = rowHash(keySpace, columnFamily, key); + log.debug("Finding {}:{}:{} as {} ", new Object[]{keySpace,columnFamily, key, hash}); + // Issue a query for anyone who lists that hash as their parent. + return find(keySpace, columnFamily, ImmutableMap.of(Content.PARENT_HASH_FIELD, (Object)hash), cachingManager); + } + + public boolean hasBody(Map content, String streamId) { + // Is there a binary stream of data for this object with this streamId? + return streamedContentHelper.hasStream(content, streamId); + } + + /** + * Generate the row id hashes needed to maintain ids and relationships in sparse. 
+ */ + public String rowHash(String keySpace, String columnFamily, String key) + throws StorageClientException { + columnFamily = columnFamily.toLowerCase(); + MessageDigest hasher; + try { + hasher = MessageDigest.getInstance("SHA1"); + } catch (NoSuchAlgorithmException e1) { + throw new StorageClientException("Unable to get hash algorithm " + e1.getMessage(), e1); + } + String keystring = keySpace + ":" + columnFamily + ":" + key; + byte[] ridkey; + try { + ridkey = keystring.getBytes("UTF8"); + } catch (UnsupportedEncodingException e) { + ridkey = keystring.getBytes(); + } + String hash = StorageClientUtils.encode(hasher.digest(ridkey)); + log.debug("rowHash: {}:{}:{} => {}", new Object[]{keySpace, columnFamily, key, hash}); + return hash; + } + + public void setStorageClientListener( + StorageClientListener storageClientListener) { + this.storageClientListener = storageClientListener; + } +} diff --git a/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoClientPool.java b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoClientPool.java new file mode 100644 index 00000000..b43c3f79 --- /dev/null +++ b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoClientPool.java @@ -0,0 +1,144 @@ +package org.sakaiproject.nakamura.lite.storage.mongo; + +import java.net.UnknownHostException; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Modified; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.api.lite.StorageCacheManager; +import 
org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClient; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.mongodb.BasicDBObject; +import com.mongodb.DB; +import com.mongodb.DBCollection; +import com.mongodb.Mongo; +import com.mongodb.MongoException; +import com.mongodb.MongoURI; + +@Component(immediate = true, metatype = true) +@Service +public class MongoClientPool implements StorageClientPool { + + protected Mongo mongo; + protected DB db; + + private static final String DEFAULT_MONGO_URI = "mongodb://127.0.0.1/?maxpoolsize=1000"; + @Property(value = DEFAULT_MONGO_URI) + public static final String PROP_MONGO_URI = "mongo.uri"; + + private static final String DEFAULT_MONGO_DB = "nakamura"; + @Property(value = DEFAULT_MONGO_DB) + public static final String PROP_MONGO_DB = "mongo.db"; + + private static final String DEFAULT_MONGO_USER = "nakamura"; + @Property(value = DEFAULT_MONGO_USER) + public static final String PROP_MONGO_USER = "mongo.user"; + + private static final String DEFAULT_MONGO_PASSWORD = "nakamura"; + @Property(value = DEFAULT_MONGO_PASSWORD) + public static final String PROP_MONGO_PASSWORD = "mongo.password"; + + private static final String DEFAULT_BUCKET = "smc_content_bodies"; + @Property(value = DEFAULT_BUCKET) + public static final String PROP_BUCKET = "mongo.gridfs.bucket"; + + public static final String PROP_AUTHORIZABLE_COLLECTION = "au"; + public static final String PROP_ACL_COLLECTION = "ac"; + public static final String PROP_CONTENT_COLLECTION = "cn"; + + public static final String PROP_ALT_KEYS = "mongo.alternate.keys"; + public static final String[] DEFAULT_ALT_KEYS = new String[] { "ac:" + AccessControlManagerImpl._KEY , }; + + + + private StorageCacheManager 
storageManagerCache; + + @Reference + private Configuration configuration; + + private Map props; + + + @Activate + @Modified + public void activate(Map props) throws MongoException, UnknownHostException { + this.props = new HashMap(props); + this.mongo = new Mongo(new MongoURI(StorageClientUtils.getSetting(props.get(PROP_MONGO_URI), DEFAULT_MONGO_URI))); + this.db = mongo.getDB(StorageClientUtils.getSetting(props.get(PROP_MONGO_DB), DEFAULT_MONGO_DB)); + + this.props.put(PROP_AUTHORIZABLE_COLLECTION, configuration.getAuthorizableColumnFamily()); + this.props.put(PROP_ACL_COLLECTION, configuration.getAclColumnFamily()); + this.props.put(PROP_CONTENT_COLLECTION, configuration.getContentColumnFamily()); + + Builder altKeyBuilder = new ImmutableMap.Builder(); + String[] altKeyConfigs = StorageClientUtils.getSetting(props.get(PROP_ALT_KEYS), DEFAULT_ALT_KEYS); + for (String altKey : altKeyConfigs){ + String[] spl = StringUtils.split(altKey, ":"); + altKeyBuilder.put(spl[0], spl[1]); + } + this.props.put(PROP_ALT_KEYS, altKeyBuilder.build()); + + initCache(); + initIndexes(); + } + + private void initIndexes() { + // index _smcid on au and cn + for (String name: new String[] {configuration.getContentColumnFamily(), configuration.getAuthorizableColumnFamily() }){ + if (!db.collectionExists(name)){ + DBCollection collection = db.createCollection(name, null); + collection.ensureIndex(new BasicDBObject(MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD, 1), + MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD + "_index", true); + } + } + + DBCollection collection; + + // index _aclKey on ac + collection = db.createCollection(configuration.getAclColumnFamily(), null); + collection.ensureIndex(new BasicDBObject(AccessControlManagerImpl._KEY, 1), + AccessControlManagerImpl._KEY + "_index", + false); + + // Apply the other indexes + for (String toIndex: configuration.getIndexColumnNames()){ + String columnFamily = StringUtils.trimToNull(StringUtils.substringBefore(toIndex, ":")); + String 
keyName = StringUtils.trimToNull(StringUtils.substringAfter(toIndex, ":")); + if (columnFamily != null && keyName != null){ + collection = db.getCollection(columnFamily); + collection.ensureIndex(new BasicDBObject(keyName, 1), keyName + "_index", false); + } + } + } + + private void initCache() { + } + + public StorageClient getClient() throws ClientPoolException { + return new MongoClient(db, props); + } + + public StorageCacheManager getStorageCacheManager() { + if ( storageManagerCache != null ) { + return storageManagerCache; + } + return null; + } + + public void bindConfiguration(Configuration configuration) { + this.configuration = configuration; + } +} diff --git a/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoUtils.java b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoUtils.java new file mode 100644 index 00000000..81e70ca2 --- /dev/null +++ b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/MongoUtils.java @@ -0,0 +1,176 @@ +package org.sakaiproject.nakamura.lite.storage.mongo; + +import java.math.BigDecimal; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.TimeZone; +import java.util.regex.Matcher; + +import org.sakaiproject.nakamura.api.lite.RemoveProperty; +import org.sakaiproject.nakamura.api.lite.content.Content; + +import com.mongodb.BasicDBList; +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; + +public class MongoUtils { + + /* + * MongoDB does not allow . and $ in a field name. + * http://www.mongodb.org/display/DOCS/Legal+Key+Names + * + * Noone should be able to type this into the UX and hopefully the UX devs are + * not going to pick this as a chaaratcer in their field names not allow users to + * create arbitrary fields. 
+ * + */ + public static final String MONGO_FIELD_DOT_REPLACEMENT = "\u00B6"; + public static final String MONGO_FIELD_DOLLAR_REPLACEMENT = "\u00A7"; + + // _:mongo: + public static final String MONGO_INTERNAL_FIELD_PREFIX = Content.INTERNAL_FIELD_PREFIX + "mongo:"; + // _:mongo:bd: + public static final String MONGO_BIGDECIMAL_FIELD_PREFIX = MONGO_INTERNAL_FIELD_PREFIX + "bd:"; + // _:mongo:tz: + public static final String MONGO_TIMEZONE_FIELD_PREFIX = MONGO_INTERNAL_FIELD_PREFIX + "tz:"; + + /** + * Take the properties as given by sparsemap and modify them for insertion into mongo. + * @param props the properties of this content + * @return the properties ready for Mongo + */ + public static DBObject cleanPropertiesForInsert(Map props) { + DBObject cleaned = new BasicDBObject(); + DBObject removeFields = new BasicDBObject(); + DBObject updatedFields = new BasicDBObject(); + + // Partition the properties into update and remove ops + for (Entry e : props.entrySet()){ + Object value = e.getValue(); + String key = escapeFieldName(e.getKey()); + // Replace the sparse RemoveProperty with the Mongo $unset. + if (value instanceof RemoveProperty){ + removeFields.put(key, 1); + } + else if (value instanceof Calendar || value instanceof GregorianCalendar){ + updatedFields.put(key, ((Calendar)value).getTime()); + updatedFields.put(MONGO_TIMEZONE_FIELD_PREFIX + key, ((Calendar)value).getTimeZone().getID()); + } + else if (value instanceof BigDecimal){ + updatedFields.put(MONGO_BIGDECIMAL_FIELD_PREFIX + key, ((BigDecimal)value).toString()); + } + else if (value != null) { + updatedFields.put(key, value); + } + } + // Remove the _smcid field so we dont change it. 
+ if (updatedFields.containsField(MongoClient.MONGO_INTERNAL_ID_FIELD)){ + updatedFields.removeField(MongoClient.MONGO_INTERNAL_ID_FIELD); + } + if (updatedFields.keySet().size() > 0){ + cleaned.put(Operators.SET, updatedFields); + } + if (removeFields.keySet().size() > 0){ + cleaned.put(Operators.UNSET, removeFields); + } + return cleaned; + } + + /** + * Convert a {@link DBObject} from into something that the rest of sparse can work with. + * @param dbo the object fetched from the DB. + * @return the dbo as a Map. + */ + public static Map convertDBObjectToMap(DBObject dbo){ + if (dbo == null){ + return null; + } + List toRemove = new ArrayList(); + Map map = new HashMap(); + for (String key: dbo.keySet()){ + Object val = dbo.get(key); + key = unescapeFieldName(key); + // The rest of sparsemapcontent expects Arrays. + // Mongo returns {@link BasicDBList}s no matter what. + if (val instanceof BasicDBList){ + BasicDBList dbl = (BasicDBList) val; + // Not really happy about using a String[] here + // but it makes more tests pass in the ContentManagerFinderImplMan case. + map.put(key, dbl.toArray(new String[0])); + } + else if (val instanceof Date){ + Calendar cal = new GregorianCalendar(); + cal.setTime((Date)val); + String tzKey = MONGO_TIMEZONE_FIELD_PREFIX + key; + // Was this date stored as a Calendar? + // If so we'll have a secondary field _:mongo:tz:key that holds + // the timezone id. 
+ if (dbo.keySet().contains(tzKey)){ + toRemove.add(tzKey); + cal.setTimeZone(TimeZone.getTimeZone((String)dbo.get(tzKey))); + } + map.put(key, cal); + } + // Convert serialized BigDecimal values back to BigDecimal + else if (key.startsWith(MONGO_BIGDECIMAL_FIELD_PREFIX)){ + String[] spl = key.split(":"); + String bdKey = spl[spl.length - 1]; + map.put(bdKey, new BigDecimal((String)val)); + toRemove.add(key); + } + else { + map.put(key, val); + } + } + // Remove keys + for (String key: toRemove){ + map.remove(key); + } + + // Delete the Mongo-supplied internal _id + if (map.containsKey(MongoClient.MONGO_INTERNAL_ID_FIELD)){ + map.remove(MongoClient.MONGO_INTERNAL_ID_FIELD); + } + // Rename the sparse id property to InternalContent.getUuidField() so the rest of sparse can use that field name. + // _smcid -> _id + if (map.containsKey(MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD)){ + map.put(Content.UUID_FIELD, map.get(MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD)); + map.remove(MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD); + } + return map; + } + + /** + * Create a key that's safe to use as a field name in MongoDB + * @param fieldName the SMC property key + * @return the MongoDB field name + */ + public static String escapeFieldName(String fieldName) { + if (fieldName == null){ + return null; + } + fieldName = fieldName.replaceAll("\\.", MONGO_FIELD_DOT_REPLACEMENT); + fieldName = fieldName.replaceAll("\\$", MONGO_FIELD_DOLLAR_REPLACEMENT); + return fieldName; + } + + /** + * Transform the MongoDB document field name into a SMC property key. + * @param fieldName the name of the field in the MongoDB document. 
+ * @return the property key in SMC + */ + public static String unescapeFieldName(String fieldName) { + if (fieldName == null){ + return null; + } + fieldName = fieldName.replaceAll(MONGO_FIELD_DOT_REPLACEMENT, "."); + fieldName = fieldName.replaceAll(MONGO_FIELD_DOLLAR_REPLACEMENT, Matcher.quoteReplacement("$")); + return fieldName; + } +} diff --git a/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/Operators.java b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/Operators.java new file mode 100644 index 00000000..3520ff97 --- /dev/null +++ b/drivers/mongo/src/main/java/org/sakaiproject/nakamura/lite/storage/mongo/Operators.java @@ -0,0 +1,13 @@ +package org.sakaiproject.nakamura.lite.storage.mongo; + +/** + * MongoDB query operators. + * http://www.mongodb.org/display/DOCS/Advanced+Queries#AdvancedQueries + * http://www.mongodb.org/display/DOCS/Updating + */ +public class Operators { + public static final String OR = "$or"; + public static final String SET = "$set"; + public static final String UNSET = "$unset"; + public static final String ALL = "$all"; +} diff --git a/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/AccessControlManagerImplTest.java b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/AccessControlManagerImplTest.java new file mode 100644 index 00000000..b6f06606 --- /dev/null +++ b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/AccessControlManagerImplTest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.mongo; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.accesscontrol.AbstractAccessControlManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class AccessControlManagerImplTest extends AbstractAccessControlManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + return MongoSetup.getClientPool(configuration); + } + +} diff --git a/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/AuthorizableManagerImplTest.java b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/AuthorizableManagerImplTest.java new file mode 100644 index 00000000..317d9596 --- /dev/null +++ b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/AuthorizableManagerImplTest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.mongo; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.authorizable.AbstractAuthorizableManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class AuthorizableManagerImplTest extends AbstractAuthorizableManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + return MongoSetup.getClientPool(configuration); + } + +} diff --git a/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/ContentManagerFinderImplTest.java b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/ContentManagerFinderImplTest.java new file mode 100644 index 00000000..34ad2a25 --- /dev/null +++ b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/ContentManagerFinderImplTest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.mongo; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerFinderTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class ContentManagerFinderImplTest extends AbstractContentManagerFinderTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + return MongoSetup.getClientPool(configuration); + } + +} diff --git a/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/ContentManagerImplTest.java b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/ContentManagerImplTest.java new file mode 100644 index 00000000..27774198 --- /dev/null +++ b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/ContentManagerImplTest.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.mongo; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.content.AbstractContentManagerTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class ContentManagerImplTest extends AbstractContentManagerTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) + throws ClassNotFoundException { + return MongoSetup.getClientPool(configuration); + } + +} diff --git a/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/LockManagerImplTest.java b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/LockManagerImplTest.java new file mode 100644 index 00000000..8b0f1b34 --- /dev/null +++ b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/LockManagerImplTest.java @@ -0,0 +1,13 @@ +package org.sakaiproject.nakamura.lite.mongo; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.lock.AbstractLockManagerImplTest; +import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool; + +public class LockManagerImplTest extends AbstractLockManagerImplTest { + + @Override + protected StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException { + return MongoSetup.getClientPool(configuration); + } +} diff --git a/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/MongoSetup.java b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/MongoSetup.java new file mode 100644 
index 00000000..8be9ce48 --- /dev/null +++ b/drivers/mongo/src/test/java/org/sakaiproject/nakamura/lite/mongo/MongoSetup.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.lite.mongo; + +import java.net.UnknownHostException; + +import org.sakaiproject.nakamura.api.lite.Configuration; +import org.sakaiproject.nakamura.lite.storage.mongo.MongoClientPool; + +import com.google.common.collect.ImmutableMap; +import com.mongodb.MongoException; + +public class MongoSetup { + + + public synchronized static MongoClientPool createClientPool(Configuration configuration) { + try { + MongoClientPool connectionPool = new MongoClientPool(); + connectionPool.bindConfiguration(configuration); + connectionPool + .activate(ImmutableMap + .of(MongoClientPool.PROP_MONGO_URI, (Object) "mongodb://127.0.0.1", + MongoClientPool.PROP_MONGO_DB, (Object) "smc_unittests")); + return connectionPool; + } catch (MongoException e) { + throw new RuntimeException(e.getMessage(), e); + } catch (UnknownHostException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + public static MongoClientPool getClientPool(Configuration configuration) { + return createClientPool(configuration); + } +} diff 
package org.sakaiproject.nakamura.lite.mongo;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;

import junit.framework.TestCase;

import org.junit.Test;
import org.sakaiproject.nakamura.api.lite.RemoveProperty;
import org.sakaiproject.nakamura.lite.storage.mongo.MongoClient;
import org.sakaiproject.nakamura.lite.storage.mongo.MongoUtils;
import org.sakaiproject.nakamura.lite.storage.mongo.Operators;

import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;

/**
 * Unit tests for {@link MongoUtils}: cleaning property maps for insert,
 * converting DBObjects back to maps, round-tripping Calendars and
 * BigDecimals, and escaping/unescaping field names.
 */
public class MongoUtilsTest extends TestCase {

    private static final String fieldName = "dots.and$_x";
    private static final String escaped = "dots" + MongoUtils.MONGO_FIELD_DOT_REPLACEMENT + "and"
            + MongoUtils.MONGO_FIELD_DOLLAR_REPLACEMENT + "_x";

    @Test
    public void testCleanPropertiesPrunesNulls() {
        // Null values must be dropped rather than written to Mongo.
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("NULL", null);
        DBObject cleaned = MongoUtils.cleanPropertiesForInsert(props);
        assertTrue(cleaned.keySet().isEmpty());
    }

    @Test
    public void testCleanPropertiesHandlesRemoveProperty() {
        // RemoveProperty markers become $unset operations, never $set.
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("toRemove", new RemoveProperty());
        DBObject cleaned = MongoUtils.cleanPropertiesForInsert(props);
        DBObject unset = (DBObject) cleaned.get(Operators.UNSET);
        assertFalse(cleaned.containsField(Operators.SET));
        assertTrue(unset.containsField("toRemove"));
    }

    @Test
    public void testCleanPropertiesHandlesIds() {
        // The internal id field is stripped before insert.
        Map<String, Object> props = new HashMap<String, Object>();
        props.put(MongoClient.MONGO_INTERNAL_ID_FIELD, "ID");
        DBObject cleaned = MongoUtils.cleanPropertiesForInsert(props);
        assertTrue(cleaned.keySet().isEmpty());
    }

    @Test
    public void testConvertDBObjectToMapHandlesIds() {
        // The sparse UUID field is renamed to the internal id field on read.
        DBObject doc = new BasicDBObject();
        doc.put(MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD, "ID");
        Map<String, Object> cleaned = MongoUtils.convertDBObjectToMap(doc);

        assertTrue(cleaned.containsKey(MongoClient.MONGO_INTERNAL_ID_FIELD));
        assertFalse(cleaned.containsKey(MongoClient.MONGO_INTERNAL_SPARSE_UUID_FIELD));
        assertEquals("ID", cleaned.get(MongoClient.MONGO_INTERNAL_ID_FIELD));
    }

    // --- Dates and Calendars

    @Test
    public void testCleanPropertiesPreservesDates() {
        Calendar cal = new GregorianCalendar();
        Date date = cal.getTime();
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("cal", cal);
        DBObject cleaned = MongoUtils.cleanPropertiesForInsert(props);
        DBObject set = (DBObject) cleaned.get(Operators.SET);
        assertEquals(date, set.get("cal"));
    }

    @Test
    public void testCleanPropertiesSavesCalendarTimeZone() {
        // Calendars are stored as a Date plus a side-car "_:mongo:tz:" field
        // carrying the time zone id.
        Calendar cal = new GregorianCalendar();
        Date date = cal.getTime();
        TimeZone tz = cal.getTimeZone();
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("cal", cal);
        DBObject cleaned = MongoUtils.cleanPropertiesForInsert(props);
        DBObject set = (DBObject) cleaned.get(Operators.SET);
        Date cleanedDate = (Date) set.get("cal");
        String cleanedTz = (String) set.get("_:mongo:tz:cal");
        assertEquals(date, cleanedDate);
        assertEquals(tz.getID(), cleanedTz);
    }

    @Test
    public void testConvertDBObjectToMapHandlesCalendars() {
        DBObject doc = new BasicDBObject();
        Date date = new Date();
        TimeZone tz = TimeZone.getDefault();
        doc.put("cal", date);
        doc.put("_:mongo:tz:cal", tz.getID());
        Map<String, Object> cleaned = MongoUtils.convertDBObjectToMap(doc);

        Calendar cal = (Calendar) cleaned.get("cal");
        assertTrue(cleaned.containsKey("cal"));
        assertFalse(cleaned.containsKey("_:mongo:tz:cal"));
        assertEquals(tz, cal.getTimeZone());
        assertEquals(date, cal.getTime());
    }

    @Test
    public void testCalendars() {
        // Full round trip: clean for insert, then convert back.
        Calendar cal = new GregorianCalendar();
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("cal", cal);
        DBObject cleaned = MongoUtils.cleanPropertiesForInsert(props);
        Map<String, Object> props2 = MongoUtils.convertDBObjectToMap((DBObject) cleaned.get(Operators.SET));

        Calendar cal2 = (Calendar) props2.get("cal");
        assertEquals(cal, cal2);
        assertEquals(cal.getTime(), cal2.getTime());
        assertEquals(cal.getTimeZone(), cal2.getTimeZone());
    }

    // --- Big Decimals

    @Test
    public void testCleanPropertiesPreservesBigDecimals() {
        // BigDecimals are stored as strings in a side-car "_:mongo:bd:" field.
        BigDecimal bd = new BigDecimal(BigInteger.TEN);

        Map<String, Object> props = new HashMap<String, Object>();
        props.put("bigone", bd);
        DBObject cleaned = MongoUtils.cleanPropertiesForInsert(props);
        DBObject set = (DBObject) cleaned.get(Operators.SET);
        assertEquals(bd, new BigDecimal((String) set.get("_:mongo:bd:bigone")));
    }

    @Test
    public void testConvertDBObjectToMapHandlesBigDecimals() {
        DBObject doc = new BasicDBObject();
        BigDecimal bd = new BigDecimal(10);

        doc.put("bigone", bd);
        doc.put("_:mongo:bd:bigone", "10");
        Map<String, Object> cleaned = MongoUtils.convertDBObjectToMap(doc);

        assertTrue(cleaned.containsKey("bigone"));
        assertFalse(cleaned.containsKey("_:mongo:bd:bigone"));

        BigDecimal bd2 = (BigDecimal) cleaned.get("bigone");
        assertEquals(bd, bd2);
    }

    @Test
    public void testBigDecimals() {
        // Full round trip for BigDecimal values.
        BigDecimal bd = new BigDecimal(10);
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("bigone", bd);
        DBObject cleaned = MongoUtils.cleanPropertiesForInsert(props);
        Map<String, Object> props2 = MongoUtils.convertDBObjectToMap((DBObject) cleaned.get(Operators.SET));
        assertEquals(props.get("bigone"), props2.get("bigone"));
    }

    // -- Field Names

    @Test
    public void testEscapeFieldName() {
        assertEquals(escaped, MongoUtils.escapeFieldName(fieldName));
        // "_id" and plain names pass through untouched.
        assertEquals("_id", MongoUtils.escapeFieldName("_id"));
        assertEquals("homer", MongoUtils.escapeFieldName("homer"));
    }

    @Test
    public void testUnescapeFieldName() {
        assertEquals(fieldName, MongoUtils.unescapeFieldName(escaped));
        assertEquals("_id", MongoUtils.unescapeFieldName("_id"));
        assertEquals("homer", MongoUtils.unescapeFieldName("homer"));
    }
}
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.sakaiproject.nakamura.lite.soak.mongo;

import java.io.IOException;
import java.util.Map;

import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Configuration;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.lite.ConfigurationImpl;
import org.sakaiproject.nakamura.lite.soak.AbstractSoakController;
import org.sakaiproject.nakamura.lite.soak.content.ContentCreateClient;
import org.sakaiproject.nakamura.lite.storage.mongo.MongoClientPool;
import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

/**
 * Soak test that creates a large number of content items against a Mongo
 * backed store, spreading the work over a configurable number of threads.
 */
public class ContentCreateSoak extends AbstractSoakController {

    private int totalContent;
    private StorageClientPool clientPool;
    private Configuration configuration;
    // Raw Map retained so the public constructor signature stays
    // source-compatible with existing callers.
    private Map contentMap;

    /**
     * @param totalContent total number of content items to create across all threads
     * @param connectionPool pool the worker clients draw storage clients from
     * @param configuration sparse configuration
     * @param cm template properties applied to each created content item
     */
    public ContentCreateSoak(int totalContent,
            StorageClientPool connectionPool, Configuration configuration, Map cm) {
        super(totalContent);
        this.configuration = configuration;
        this.clientPool = connectionPool;
        this.totalContent = totalContent;
        this.contentMap = cm;
    }

    protected Runnable getRunnable(int nthreads) throws ClientPoolException,
            StorageClientException, AccessDeniedException {
        // Integer division: any remainder is simply not created.
        int contentPerThread = totalContent / nthreads;
        return new ContentCreateClient(contentPerThread,
                clientPool, configuration, contentMap);
    }

    /**
     * Entry point. Optional arguments: argv[0] = thread count (default 1),
     * argv[1] = total content items (default 100000).
     */
    public static void main(String[] argv) throws ClientPoolException, StorageClientException,
            AccessDeniedException, ClassNotFoundException, IOException {

        int totalContent = 100000;
        int nthreads = 1;

        if (argv.length > 0) {
            nthreads = StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads);
        }
        if (argv.length > 1) {
            totalContent = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalContent);
        }
        ConfigurationImpl configuration = new ConfigurationImpl();
        // Template properties stamped onto every content item.
        Map<String, Object> cm = Maps.newHashMap();
        cm.put("sling:resourceType", "test/resourcetype");
        cm.put("sakai:pooled-content-manager", new String[]{"a", "b"});
        cm.put("sakai:type", "sdfsdaggdsfgsdgsd");
        cm.put("sakai:marker", "marker-marker-marker");
        Map<String, Object> properties = Maps.newHashMap();
        properties.put("keyspace", "n");
        properties.put("acl-column-family", "ac");
        properties.put("authorizable-column-family", "au");
        properties.put("content-column-family", "cn");
        configuration.activate(properties);

        ContentCreateSoak contentCreateSoak = new ContentCreateSoak(
                totalContent, getClientPool(configuration), configuration, cm);
        contentCreateSoak.launchSoak(nthreads);
    }

    /**
     * Builds an activated Mongo client pool aimed at a local server and the
     * "smc_soaktests" database.
     */
    protected static StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException {
        MongoClientPool clientPool = new MongoClientPool();
        try {
            clientPool.bindConfiguration(configuration);
            clientPool.activate(ImmutableMap
                    .of(MongoClientPool.PROP_MONGO_URI, (Object) "mongodb://127.0.0.1",
                        MongoClientPool.PROP_MONGO_DB, (Object) "smc_soaktests"));
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
        return clientPool;
    }
}
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.sakaiproject.nakamura.lite.soak.mongo;

import java.io.IOException;
import java.util.Map;

import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Configuration;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.lite.ConfigurationImpl;
import org.sakaiproject.nakamura.lite.soak.AbstractSoakController;
import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsClient;
import org.sakaiproject.nakamura.lite.storage.mongo.MongoClientPool;
import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

/**
 * Soak test that creates users and groups against a Mongo backed store,
 * spreading the work over a configurable number of threads.
 */
public class CreateUsersAndGroupsSoak extends AbstractSoakController {

    private int totalUsers;
    private StorageClientPool clientPool;
    private Configuration configuration;

    /**
     * @param totalUsers total number of users to create across all threads
     * @param clientPool pool the worker clients draw storage clients from
     * @param configuration sparse configuration
     */
    public CreateUsersAndGroupsSoak(int totalUsers, StorageClientPool clientPool, Configuration configuration) {
        super(totalUsers);
        this.clientPool = clientPool;
        this.configuration = configuration;
        this.totalUsers = totalUsers;
    }

    protected Runnable getRunnable(int nthreads) throws ClientPoolException,
            StorageClientException, AccessDeniedException {
        // Integer division: any remainder is simply not created.
        int usersPerThread = totalUsers / nthreads;
        return new CreateUsersAndGroupsClient(usersPerThread, clientPool, configuration);
    }

    /**
     * Entry point. Optional arguments: argv[0] = thread count (default 10),
     * argv[1] = total users (default 1000).
     */
    public static void main(String[] argv) throws ClientPoolException, StorageClientException,
            AccessDeniedException, ClassNotFoundException, IOException {

        int totalUsers = 1000;
        int nthreads = 10;

        if (argv.length > 0) {
            nthreads = StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads);
        }
        if (argv.length > 1) {
            totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers);
        }
        ConfigurationImpl configuration = new ConfigurationImpl();
        Map<String, Object> properties = Maps.newHashMap();
        properties.put("keyspace", "n");
        properties.put("acl-column-family", "ac");
        properties.put("authorizable-column-family", "au");
        properties.put("content-column-family", "cn");
        configuration.activate(properties);

        CreateUsersAndGroupsSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsSoak(
                totalUsers, getClientPool(configuration), configuration);
        createUsersAndGroupsSoak.launchSoak(nthreads);
    }

    /**
     * Builds an activated Mongo client pool aimed at a local server (with an
     * enlarged connection pool) and the "smc_soaktests" database.
     */
    protected static StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException {
        MongoClientPool clientPool = new MongoClientPool();
        try {
            clientPool.bindConfiguration(configuration);
            clientPool.activate(ImmutableMap
                    .of(MongoClientPool.PROP_MONGO_URI, (Object) "mongodb://127.0.0.1/?maxpoolsize=1000",
                        MongoClientPool.PROP_MONGO_DB, (Object) "smc_soaktests"));
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
        return clientPool;
    }
}
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.sakaiproject.nakamura.lite.soak.mongo;

import java.io.IOException;
import java.util.Map;

import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Configuration;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.lite.ConfigurationImpl;
import org.sakaiproject.nakamura.lite.soak.AbstractSoakController;
import org.sakaiproject.nakamura.lite.soak.authorizable.CreateUsersAndGroupsWithMembersClient;
import org.sakaiproject.nakamura.lite.storage.mongo.MongoClientPool;
import org.sakaiproject.nakamura.lite.storage.spi.StorageClientPool;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

/**
 * Soak test that creates users plus groups with members against a Mongo
 * backed store, spreading the work over a configurable number of threads.
 */
public class CreateUsersAndGroupsWithMembersSoak extends AbstractSoakController {

    private int totalUsers;
    private StorageClientPool clientPool;
    private int totalGroups;
    private Configuration configuration;

    /**
     * @param totalUsers total number of users to create across all threads
     * @param totalGroups total number of groups to create across all threads
     * @param clientPool pool the worker clients draw storage clients from
     * @param configuration sparse configuration
     */
    public CreateUsersAndGroupsWithMembersSoak(int totalUsers, int totalGroups,
            StorageClientPool clientPool, Configuration configuration) {
        // Weight groups at 5x a user for progress accounting, matching the
        // extra membership work each group implies.
        super(totalUsers + (totalGroups * 5));
        this.clientPool = clientPool;
        this.configuration = configuration;
        this.totalUsers = totalUsers;
        this.totalGroups = totalGroups;
    }

    protected Runnable getRunnable(int nthreads) throws ClientPoolException,
            StorageClientException, AccessDeniedException {
        // Integer division: any remainder is simply not created.
        int usersPerThread = totalUsers / nthreads;
        int groupsPerThread = totalGroups / nthreads;
        return new CreateUsersAndGroupsWithMembersClient(usersPerThread, groupsPerThread,
                clientPool, configuration);
    }

    /**
     * Entry point. Optional arguments: argv[0] = thread count (default 10),
     * argv[1] = total users (default 1000), argv[2] = total groups
     * (default 100).
     */
    public static void main(String[] argv) throws ClientPoolException, StorageClientException,
            AccessDeniedException, ClassNotFoundException, IOException {

        int totalUsers = 1000;
        int totalGroups = 100;
        int nthreads = 10;

        if (argv.length > 0) {
            nthreads = StorageClientUtils.getSetting(Integer.valueOf(argv[0]), nthreads);
        }
        if (argv.length > 1) {
            totalUsers = StorageClientUtils.getSetting(Integer.valueOf(argv[1]), totalUsers);
        }
        if (argv.length > 2) {
            // BUG FIX: the fallback default was previously totalUsers, so an
            // unset/sentinel argv[2] silently created 1000 groups instead of 100.
            totalGroups = StorageClientUtils.getSetting(Integer.valueOf(argv[2]), totalGroups);
        }
        ConfigurationImpl configuration = new ConfigurationImpl();
        Map<String, Object> properties = Maps.newHashMap();
        properties.put("keyspace", "n");
        properties.put("acl-column-family", "ac");
        properties.put("authorizable-column-family", "au");
        properties.put("content-column-family", "cn");
        configuration.activate(properties);

        CreateUsersAndGroupsWithMembersSoak createUsersAndGroupsSoak = new CreateUsersAndGroupsWithMembersSoak(
                totalUsers, totalGroups, getClientPool(configuration), configuration);
        createUsersAndGroupsSoak.launchSoak(nthreads);
    }

    /**
     * Builds an activated Mongo client pool aimed at a local server (with an
     * enlarged connection pool) and the "smc_soaktests" database.
     */
    protected static StorageClientPool getClientPool(Configuration configuration) throws ClassNotFoundException {
        MongoClientPool clientPool = new MongoClientPool();
        try {
            clientPool.bindConfiguration(configuration);
            clientPool.activate(ImmutableMap
                    .of(MongoClientPool.PROP_MONGO_URI, (Object) "mongodb://127.0.0.1/?maxpoolsize=1000",
                        MongoClientPool.PROP_MONGO_DB, (Object) "smc_soaktests"));
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
        return clientPool;
    }
}
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.sakaiproject.nakamura.lite.soak.mongo;

import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;

import java.io.IOException;

/**
 * Convenience entry point that runs the Mongo soak tests one after another,
 * forwarding the same command line arguments to each.
 */
public class SoakAll {

    /**
     * Runs {@link CreateUsersAndGroupsSoak} then
     * {@link CreateUsersAndGroupsWithMembersSoak} with the unmodified
     * argument vector.
     */
    public static void main(String[] argv) throws ClientPoolException, StorageClientException,
            AccessDeniedException, ClassNotFoundException, IOException {
        CreateUsersAndGroupsSoak.main(argv);
        CreateUsersAndGroupsWithMembersSoak.main(argv);
    }
}
+ + + + + org.apache.sling + maven-launchpad-plugin + 2.1.0 + + + prepare-package-jar + + prepare-package + + + jar + false + + + org.sakaiproject.nakamura + uk.co.tfd.sm.jetty + 0.1-SNAPSHOT + 30 + + + + + + attach-bundle-list + + attach-bundle-list + + + false + + + + create-karaf-descriptor + + create-karaf-descriptor + + + false + + org.apache.felix:org.apache.felix.configadmin + org.apache.felix:org.apache.felix.webconsole + + + + + + + org.apache.maven.plugins + maven-resources-plugin + 2.5 + + + include-resource-modifications + process-resources + + resources + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.3.2 + + + + + true + + + + Sparse Server + ${project.version} + Ian Boston + Sparse Server + ${project.version} + ${project.groupId} + Ian Boston + org.sakaiproject.nakamura.app.NakamuraMain + + + + + + maven-assembly-plugin + + + src/main/assembly/bin.xml + + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.8 + + + + org.apache.sling + + + + + + + + + org.apache.sling + org.apache.sling.launchpad.base + 2.3.0 + webapp + war + runtime + + + org.apache.sling + org.apache.sling.launchpad.base + 2.3.0 + app + provided + + + + + + dist + + + + maven-assembly-plugin + + + assemble-distribution + + + + assembly.xml + + + + false + + + package + + attached + + + + + + + + + + diff --git a/extensions/app/proxy/config/vivo/cornell b/extensions/app/proxy/config/vivo/cornell new file mode 100644 index 00000000..24da709d --- /dev/null +++ b/extensions/app/proxy/config/vivo/cornell @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/cornell.raw.json?vid=individual5320 +request-proxy-endpoint = http://vivo.cornell.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.cornell.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = vivoprofile.vm +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/cornell.html b/extensions/app/proxy/config/vivo/cornell.html new file mode 100644 index 00000000..12ce3687 --- /dev/null +++ b/extensions/app/proxy/config/vivo/cornell.html @@ -0,0 +1,25 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/cornell.raw.json?vid=individual5320 +request-proxy-endpoint = http://vivo.cornell.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.cornell.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = cornell.html.vm +finaltemplatepattern = cornell{0}.html.vm +finaltemplateselectorproperty = rdf_type +templatepriorities = foaf_Organization:1,Organization:10,ResearchUnit:20 +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/cornell.json b/extensions/app/proxy/config/vivo/cornell.json new file mode 100644 index 00000000..9339406c --- /dev/null +++ b/extensions/app/proxy/config/vivo/cornell.json @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/cornell.raw.json?vid=individual5320 +request-proxy-endpoint = http://vivo.cornell.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.cornell.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/cornell.raw.json b/extensions/app/proxy/config/vivo/cornell.raw.json new file mode 100644 index 00000000..d05d14a8 --- /dev/null +++ b/extensions/app/proxy/config/vivo/cornell.raw.json @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/cornell.raw.json?vid=individual5320 +request-proxy-endpoint = http://vivo.cornell.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.cornell.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +#result-key = vid diff --git a/extensions/app/proxy/config/vivo/iu b/extensions/app/proxy/config/vivo/iu new file mode 100644 index 00000000..ec972c23 --- /dev/null +++ b/extensions/app/proxy/config/vivo/iu @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/iu.raw.json?vid=person15641 +request-proxy-endpoint = http://vivo.iu.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.iu.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = vivoprofile.vm +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/iu.raw.json b/extensions/app/proxy/config/vivo/iu.raw.json new file mode 100644 index 00000000..1b76b872 --- /dev/null +++ b/extensions/app/proxy/config/vivo/iu.raw.json @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/iu.raw.json?vid=person15641 +request-proxy-endpoint = http://vivo.iu.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.iu.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/localhost b/extensions/app/proxy/config/vivo/localhost new file mode 100644 index 00000000..bc1817ef --- /dev/null +++ b/extensions/app/proxy/config/vivo/localhost @@ -0,0 +1,21 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+request-proxy-endpoint = http://localhost:9090/vivo/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.tfd.co.uk/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = vivoprofile.vm +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/localhost.raw.json b/extensions/app/proxy/config/vivo/localhost.raw.json new file mode 100644 index 00000000..4c959941 --- /dev/null +++ b/extensions/app/proxy/config/vivo/localhost.raw.json @@ -0,0 +1,21 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+request-proxy-endpoint = http://localhost:9090/vivo/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a default namespace +# in this instance that's http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.tfd.co.uk/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/med.cornell b/extensions/app/proxy/config/vivo/med.cornell new file mode 100644 index 00000000..e470f0cd --- /dev/null +++ b/extensions/app/proxy/config/vivo/med.cornell @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/med.cornell.raw.json?vid=cwid-geb2003 +request-proxy-endpoint = http://vivo.med.cornell.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.med.cornell.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = vivoprofile.vm +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/med.cornell.raw.json b/extensions/app/proxy/config/vivo/med.cornell.raw.json new file mode 100644 index 00000000..9c08ed82 --- /dev/null +++ b/extensions/app/proxy/config/vivo/med.cornell.raw.json @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/med.cornell.raw.json?vid=cwid-geb2003 +request-proxy-endpoint = http://vivo.med.cornell.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.med.cornell.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/psm b/extensions/app/proxy/config/vivo/psm new file mode 100644 index 00000000..dc3b63eb --- /dev/null +++ b/extensions/app/proxy/config/vivo/psm @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/iu.raw.json?vid=individual9 +request-proxy-endpoint = http://vivo.psm.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.psm.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = vivoprofile.vm +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/psm.raw.json b/extensions/app/proxy/config/vivo/psm.raw.json new file mode 100644 index 00000000..ea56654a --- /dev/null +++ b/extensions/app/proxy/config/vivo/psm.raw.json @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/iu.raw.json?vid=individual9 +request-proxy-endpoint = http://vivo.psm.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.psm.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/scripps b/extensions/app/proxy/config/vivo/scripps new file mode 100644 index 00000000..2e86869c --- /dev/null +++ b/extensions/app/proxy/config/vivo/scripps @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/iu.raw.json?vid=JandaKim +request-proxy-endpoint = http://vivo.scripps.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.scripps.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = vivoprofile.vm +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/scripps.raw.json b/extensions/app/proxy/config/vivo/scripps.raw.json new file mode 100644 index 00000000..3221d3b2 --- /dev/null +++ b/extensions/app/proxy/config/vivo/scripps.raw.json @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/iu.raw.json?vid=JandaKim +request-proxy-endpoint = http://vivo.scripps.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.scripps.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/ufl b/extensions/app/proxy/config/vivo/ufl new file mode 100644 index 00000000..a74a2943 --- /dev/null +++ b/extensions/app/proxy/config/vivo/ufl @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/ufl.raw.json?vid=n128483 +request-proxy-endpoint = http://vivo.ufl.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.ufl.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = vivoprofile.vm +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/ufl.raw.json b/extensions/app/proxy/config/vivo/ufl.raw.json new file mode 100644 index 00000000..ebdef60a --- /dev/null +++ b/extensions/app/proxy/config/vivo/ufl.raw.json @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/ufl.raw.json?vid=n128483 +request-proxy-endpoint = http://vivo.ufl.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.ufl.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/wustl b/extensions/app/proxy/config/vivo/wustl new file mode 100644 index 00000000..93a7dc52 --- /dev/null +++ b/extensions/app/proxy/config/vivo/wustl @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/wustl.raw.json?vid=hrfact335386082 +request-proxy-endpoint = http://vivo.wustl.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.wustl.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +finaltemplate = vivoprofile.vm +content-type = text/html +content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/proxy/config/vivo/wustl.raw.json b/extensions/app/proxy/config/vivo/wustl.raw.json new file mode 100644 index 00000000..addf4b46 --- /dev/null +++ b/extensions/app/proxy/config/vivo/wustl.raw.json @@ -0,0 +1,22 @@ +# Proxy Setup +# Get operation to the Cornell RDF end point. 
+# test with http://localhost:8080/proxy/vivo/wustl.raw.json?vid=hrfact335386082 +request-proxy-endpoint = http://vivo.wustl.edu/individual/${vid}/${vid}.rdf +request-proxy-method = GET +# Pre processor setup +preprocessor = None +# Post processor setup +# The vivo URL for the instance must be listed as a defaul name space +# in this instance thats http://vivo.tfd.co.uk/individual/ +postprocessor = RDFToHTMLResolvedJsonProxyPostProcessor +namespacemap = rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#; \ + vivocore=http://vivoweb.org/ontology/core#; \ + http://vivo.wustl.edu/individual/; \ + rdfs=http://www.w3.org/2000/01/rdf-schema#; \ + vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#; \ + foaf=http://xmlns.com/foaf/0.1/; \ + owl=http://www.w3.org/2002/07/owl# +#finaltemplate = vivoprofile.vm +#content-type = text/html +#content-encoding = UTF-8 +result-key = vid diff --git a/extensions/app/src/main/assembly/bin.xml b/extensions/app/src/main/assembly/bin.xml new file mode 100644 index 00000000..73e0cb53 --- /dev/null +++ b/extensions/app/src/main/assembly/bin.xml @@ -0,0 +1,50 @@ + + + + bin + + tar.gz + zip + + + + + README* + + + + src/main/resources/META-INF + + + LICENSE* + NOTICE* + DISCLAIMER + + + + target + + + *.jar + *.war + + + + \ No newline at end of file diff --git a/extensions/app/src/main/bundles/list.xml b/extensions/app/src/main/bundles/list.xml new file mode 100644 index 00000000..db1236c2 --- /dev/null +++ b/extensions/app/src/main/bundles/list.xml @@ -0,0 +1,179 @@ + + + + + commons-io + commons-io + 1.4 + + + commons-fileupload + commons-fileupload + 1.2.2 + + + commons-collections + commons-collections + 3.2.1 + + + commons-lang + commons-lang + 2.5 + + + commons-pool + commons-pool + 1.5.5 + + + commons-codec + commons-codec + 1.5 + + + + org.apache.felix + org.apache.felix.http.whiteboard + 2.0.4 + + + org.apache.aries.jmx + org.apache.aries.jmx.api + 0.1-incubating + + + org.apache.aries.jmx + org.apache.aries.jmx.core + 
0.1-incubating + + + org.apache.geronimo.specs + geronimo-jta_1.1_spec + 1.1.1 + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.milton + 1.6.4-SNAPSHOT + + + com.google.code.gson + gson + 1.7.1 + + + + + org.apache.sling + org.apache.sling.commons.log + 2.0.7-20100823 + + + + + org.apache.felix + org.apache.felix.webconsole + 3.1.2 + + + org.apache.felix + org.apache.felix.webconsole.plugins.event + 1.0.2 + + + org.apache.felix + org.apache.felix.webconsole.plugins.memoryusage + 1.0.2 + + + + + + org.apache.felix + org.apache.felix.eventadmin + 1.2.2 + + + org.apache.felix + org.apache.felix.scr + 1.6.0 + + + org.apache.felix + org.apache.felix.configadmin + 1.2.4 + + + org.apache.felix + org.apache.felix.metatype + 1.0.4 + + + + + + + + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + + + org.sakaiproject.nakamura + uk.co.tfd.sm.memory + 0.1-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.jdbc-driver + 1.5-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.solr + 1.4-SNAPSHOT + + + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.webdav + 0.1-SNAPSHOT + + + org.sakaiproject.nakamura + uk.co.tfd.sm.template + 0.1-SNAPSHOT + + + org.sakaiproject.nakamura + uk.co.tfd.sm.http + 0.1-SNAPSHOT + + + org.sakaiproject.nakamura + uk.co.tfd.sm.proxy + 0.1-SNAPSHOT + + + org.sakaiproject.nakamura + uk.co.tfd.sm.jaxrs + 0.1-SNAPSHOT + + + org.sakaiproject.nakamura + uk.co.tfd.sm.resource + 0.1-SNAPSHOT + + + diff --git a/extensions/app/src/main/dist/dos/server.bat b/extensions/app/src/main/dist/dos/server.bat new file mode 100644 index 00000000..9b0e37a8 --- /dev/null +++ b/extensions/app/src/main/dist/dos/server.bat @@ -0,0 +1,196 @@ +:: +:: Licensed to the Apache Software Foundation (ASF) under one +:: or more contributor license agreements. 
See the NOTICE file +:: distributed with this work for additional information +:: regarding copyright ownership. The ASF licenses this file +:: to you under the Apache License, Version 2.0 (the +:: "License"); you may not use this file except in compliance +:: with the License. You may obtain a copy of the License at +:: +:: http://www.apache.org/licenses/LICENSE-2.0 +:: +:: Unless required by applicable law or agreed to in writing, +:: software distributed under the License is distributed on an +:: "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +:: KIND, either express or implied. See the License for the +:: specific language governing permissions and limitations +:: under the License. + +@echo off +setlocal + +::----------------------------------------------------------------------------- +:: HTTP Service Arguments +::----------------------------------------------------------------------------- + +::* Default Port +:: + set SLING_PORT=8080 + +::* Default Bind Address +:: + set SLING_ADDR=0.0.0.0 + +::----------------------------------------------------------------------------- +:: General JVM Arguments +::----------------------------------------------------------------------------- + +::* Uncomment one of the JVM_TYPE if you wish a different than '-hotspot' +:: +:: set JVM_TYPE=-classic +:: set JVM_TYPE=-server + +::* Memory settings +:: + set JVM_MINHEAP=-Xms64m + set JVM_MAXHEAP=-Xmx256m + +::* Uncomment, or redefine one of the following JAVA_HOME if you wish a +:: different than your default one +:: +:: set JAVA_HOME=c:\java\jdk1.4.2_08 + +::* 'Executable' +:: + set JVM_START=-jar bin/${project.build.finalName}-standalone.jar + +::* Additional JVM Options +:: +:: set JVM_OPTS=-Djava.security.auth.login.config=etc/jaas.config + +::* Debug Port (only if started with -debug socket) +:: + set JVM_DEBUG_PORT=30303 + +::* Default Debug Options +:: + set JVM_DEBUG_OPTS=-Xdebug -Xnoagent -Djava.compiler=NONE + + 
+::----------------------------------------------------------------------------- +:: should not change below here +::----------------------------------------------------------------------------- + +:: change drive and directory +%~d0 +cd %~p0 + +:: set window title +set PROGRAM_TITLE=Apache Sling + +:: parse arguments +:while1 + if "%1"=="-debug" ( + set JVM_DEBUG=%JVM_DEBUG_OPTS% + set JVM_DEBUG_TRANSPORT=dt_socket + set JVM_DEBUG_SUSPENDED=n + set JVM_DEBUG_ADR=%JVM_DEBUG_PORT% + goto next1 + ) + + if "%1"=="-suspended" ( + set JVM_DEBUG_SUSPENDED=y + goto next1 + ) + + if "%1"=="-quiet" ( + set NO_INFOMSG=y + goto next1 + ) + + if "%1"=="-jconsole" ( + set JVM_JCONSOLE=-Dcom.sun.management.jmxremote + set NO_INFOMSG=y + goto next1 + ) + + if "%1"=="-level" ( + set SLING_LEVEL=%2 + shift /1 + goto next1 + ) + + if "%1"=="-help" ( + goto usage + ) + + if "%1" NEQ "" ( + echo invalid argument %1 + goto usage + ) + +:next1 + shift /1 + if "%1" NEQ "" goto while1 + + +:: assemble jvm options +set JVM_ADDOPTS=%JVM_OPTS% +set JVM_OPTS=%JVM_MINHEAP% %JVM_MAXHEAP% +if defined JVM_JCONSOLE set JVM_OPTS=%JVM_JCONSOLE% %JVM_OPTS% +if defined JVM_TYPE set JVM_OPTS=%JVM_TYPE% %JVM_OPTS% +if defined JVM_ADDOPTS set JVM_OPTS=%JVM_OPTS% %JVM_ADDOPTS% + +:: check for JVM +set JAVA="%JAVA_HOME%\bin\java.exe" +if not exist %JAVA% ( + echo No JVM found at %JAVA% + goto exit +) + +:: check for debug +if defined JVM_DEBUG ( + set JVM_OPTS=%JVM_OPTS% %JVM_DEBUG% -Xrunjdwp:transport=%JVM_DEBUG_TRANSPORT%,server=y,suspend=%JVM_DEBUG_SUSPENDED%,address=%JVM_DEBUG_ADR% +) + +:: assemble program arguments +if defined SLING_PORT set SLING_ARGS=%SLING_ARGS% -p %SLING_PORT% +if defined SLING_ADDR set SLING_ARGS=%SLING_ARGS% -a %SLING_ADDR% +if defined SLING_LEVEL set SLING_ARGS=%SLING_ARGS% -l %SLING_LEVEL% + +:: ensure logging to stdout +set SLING_ARGS=%SLING_ARGS% -f - + +:: print info message +if defined NO_INFOMSG goto startcq + +echo 
------------------------------------------------------------------------------- +echo Starting %PROGRAM_TITLE% +echo ------------------------------------------------------------------------------- +call %JAVA% %JVM_TYPE% -version +echo ------------------------------------------------------------------------------- +if "%JVM_DEBUG_TRANSPORT%"=="dt_socket" ( + echo debugging: Socket (%JVM_DEBUG_PORT%^) + if "%JVM_DEBUG_SUSPENDED%"=="y" echo starting jvm suspended^! + echo ------------------------------------------------------------------------------- +) +echo %JAVA% %JAVA_VM_TYPE% %JVM_OPTS% %JVM_START% %SLING_ARGS% +echo ------------------------------------------------------------------------------- + + +:startcq + +title %PROGRAM_TITLE% +%JAVA% %JAVA_VM_TYPE% %JVM_OPTS% %JVM_START% %SLING_ARGS% +goto exit + + +::----------------------------------------------------------------------------- +:usage + +echo %PROGRAM_TITLE% +echo usage: %0 [options] +echo. +echo where options include: +echo -debug enable debug +echo -suspended start suspended (only if debug) +echo -quiet don't show info message +echo -jconsole start with -Dcom.sun.management.jmxremote +echo -level [ level ] set initial log level (DEBUG, INFO, ...) +echo -help this usage +echo. +echo for additional tuning, edit the first section of the %0 file + +::----------------------------------------------------------------------------- +:exit +endlocal diff --git a/extensions/app/src/main/dist/unix/serverctl b/extensions/app/src/main/dist/unix/serverctl new file mode 100755 index 00000000..a15f6d98 --- /dev/null +++ b/extensions/app/src/main/dist/unix/serverctl @@ -0,0 +1,574 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# define a special java home +#JAVA_HOME=/opt/java/jdk1.6.0 + +# enable this as default command if no command argument is given +#SLING_COMMAND=start + +# define the port sling listens to. +#SLING_PORT=8080 + +# define the interface sling binds to. +#SLING_INTERFACE=0.0.0.0 + +# enable if jvm-debugging should be enabled +#SLING_JVM_DEBUG=socket + +# set the max amount of java heap space +#SLING_JVM_HEAP_MAX=512 + +# add additional options to the jvm +#SLING_JVM_OPTS=-enableassertions + +# enable starting the engine in foreground +#SLING_FOREGROUND=y + +#--------------------------------------------------------------------- +# Don't edit below here +#--------------------------------------------------------------------- + +#--------------------------------------------------------------------- +# functions +#--------------------------------------------------------------------- + +# print the usage info +usage() { +cat <<-@@ +$SLING_TITLE Control, version 2.0.0 ($SLING_SVN_ID) + usage: `basename $0` [options] {start|stop|restart|status} + +Engine Options: + --context the sling context directory, defaults to `dirname $0` + --port, -p the default port to listen to + --loglevel, -l the initial loglevel. + --logdir the log directory for startup.log. defaults to `dirname $0`/logs + --log the startup log file. 
defaults to `dirname $0`/logs/startup.log + --interface, -a the interface to bind to (use 0.0.0.0 for any). + +Java Options: + --javahome the java home directory. overrides JAVA_HOME env var. + --heap-min the minimum heap in megabytes. defaults to 128 + --heap-max the maximum heap in megabytes. defaults to 256 + --debug, -d [socket|shmem] + starts jvm in debug mode. default 'socket' + --debug-suspended [socket|shmem] + starts jvm in suspended debug mode. default 'socket' + --debug-port port for debug address. default "30303" + --profile [yjp] start jvm with YourKit Java Profiler + --javaopts additional java options + --jaas use jaas.config. default is disabled + --jaas-config config for jaas. default is etc/jaas.config + --verbose-gc turn on verbose gc + +Other options + --fg, -D starts Sling in foreground + --bg starts Sling in background. this is the default. + --verbose, -v be more verbose + --dry prepare but do not start + --help, -h show this help + --max-files sets the ulimit for max open files before executing the jvm. 
+ default is 1024 +@@ +} + +# init the defaults +initDefaults() { + BASEDIR=`fullpath -d $0` + SLING_TITLE="Sling" + export SLING_TITLE + SLING_SVN_ID='$Rev: 25335 $' + export SLING_SVN_ID + SLING_CONTEXT="${SLING_CONTEXT:-$BASEDIR}" + export SLING_CONTEXT + export SLING_PORT + export SLING_LOGLEVEL + SLING_LOGDIR="${SLING_LOGDIR:-"$BASEDIR/logs"}" + export SLING_LOGDIR + SLING_LOG="${SLING_LOG:-"$SLING_LOGDIR/startup.log"}" + export SLING_LOG + SLING_INTERFACE="${SLING_INTERFACE:-$SLING_ADDRESS}" + export SLING_INTERFACE + SLING_JVM_HEAP_MIN="${SLING_HEAP_MIN:-"128"}" + export SLING_JVM_HEAP_MIN + SLING_JVM_HEAP_MAX="${SLING_HEAP_MAX:-"256"}" + export SLING_JVM_HEAP_MAX + export SLING_JVM_JAAS + SLING_JVM_JAAS_CONFIG=etc/jaas.config + export SLING_JVM_JAAS_CONFIG + export SLING_JVM_OPTS + export SLING_JVM_VERBOSE_GC + export SLING_JVM_DEBUG + SLING_JVM_DEBUG_PORT=30303 + export SLING_JVM_DEBUG_PORT + SLING_JVM_DEBUG_SUSPENDED=n + export SLING_JVM_DEBUG_SUSPENDED + export SLING_JVM_PROFILE + export SLING_FOREGROUND + export SLING_DRY + SLING_COMMAND=${SLING_COMMAND} + export SLING_COMMAND + export SLING_VERBOSE + SLING_MAX_OPEN_FILES="${SLING_MAX_OPEN_FILES:-"1024"}" + export SLING_MAX_OPEN_FILES +} + +# echo to stderr and to the log file +_log() { + _level=$1; shift + if [ "$use_stderr" = "YES" ] ; then + echo "`date '+%d.%m.%Y %H:%M:%S'` *$_level* $*" >&2 + fi + if [ -w "$SLING_LOG" ]; then + echo "`date '+%d.%m.%Y %H:%M:%S'` *$_level* $*" >> "$SLING_LOG" + fi +} + +# log an error message +err() { + _log "ERROR" $* +} + +# log an warning message +warn() { + _log "WARN " $* +} + +# log an info message +info() { + _log "INFO " $* +} + +# print all relevant variables +dump() { + set | grep SLING +} + +# calculate the fullpath +fullpath() { + OPWD="$PWD" + if [ "$1" = "-d" ]; then + cd `dirname $2` + echo `pwd -L` + else + cd `dirname $1` + echo `pwd -L`/`basename $1` + fi + cd $OPWD +} + +# check if verbose level +verbose() { + test -n "$SLING_VERBOSE" +} + +# print 
the java version of the $SLING_JVM vm +javaVersion() { + jvm_version=`$SLING_JVM -version 2>&1 | grep "java version"` + case "$jvm_version" in + "java version \"1.2"*) echo 1.2;; + "java version \"1.3"*) echo 1.3;; + "java version \"1.4"*) echo 1.4;; + "java version \"1.5"*) echo 1.5;; + "java version \"1.6"*) echo 1.6;; + *) echo ;; + esac +} + +# print the debug info +printDebug() { + if [ "$SLING_JVM_DEBUG" = "socket" ]; then + info "attaching debugger on port ${SLING_JVM_DEBUG_PORT}" + if [ "${SLING_JVM_DEBUG_SUSPENDED}" = y ]; then + info "jvm is suspended! attach debugger to continue." + fi + fi + if [ "$SLING_JVM_DEBUG" = "shmem" ]; then + info "attaching debugger using shared memory" + if [ "${SLING_JVM_DEBUG_SUSPENDED}" = y ]; then + info "jvm is suspended! attach debugger to continue." + fi + fi +} + +#--------------------------------------------------------------------- +# main program begins here +#--------------------------------------------------------------------- + +# parse the arguments +initDefaults +while [ -n "$1" ]; do + case "$1" in + '--context') + SLING_CONTEXT=$2 + shift;; + '--port' | '-p') + SLING_PORT=$2 + shift;; + '--loglevel' | '-l') + SLING_LOGLEVEL=$2 + shift;; + '--logdir') + SLING_LOGDIR=$2 + shift;; + '--log') + SLING_LOG=$2 + shift;; + '--interface' | '-a') + SLING_INTERFACE=$2 + shift;; + '--javahome') + JAVA_HOME=$2 + shift;; + '--javaopts') + SLING_JVM_OPTS=$2 + shift;; + '--heap-min') + SLING_JVM_HEAP_MIN=$2 + shift;; + '--heap-max') + SLING_JVM_HEAP_MAX=$2 + shift;; + '--debug' | '-d' | '--debug-suspended') + SLING_JVM_DEBUG=socket + if [ "$1" == "--debug-suspended" ]; then + SLING_JVM_DEBUG_SUSPENDED=y + fi + if [ "$2" == "socket" -o "$2" == "shmem" ]; then + SLING_JVM_DEBUG=$2 + shift + fi;; + '--profile') + SLING_JVM_PROFILE=yjp + if [ "$2" = "yjp" ]; then + SLING_JVM_PROFILE=$2 + shift + fi;; + '--debug-port') + SLING_JVM_DEBUG_PORT=$2 + shift;; + '--debug-suspended') + SLING_JVM_DEBUG_SUSPENDED=y + ;; + '--jaas') + 
SLING_JVM_JAAS=y + ;; + '--jaas-config') + SLING_JVM_JAAS_CONFIG=$2 + shift;; + '--verbose-gc') + SLING_JVM_VERBOSE_GC=y + ;; + '--fg' | '-D') + SLING_FOREGROUND=y + ;; + '--bg') + SLING_FOREGROUND=n + ;; + '--verbose' | '-v') + SLING_VERBOSE=y + ;; + '--max-files') + SLING_MAX_OPEN_FILES=$2 + shift;; + '--dry') + SLING_DRY=y + ;; + '--help' | '-h') + usage + exit ;; + 'status'|'start'|'psmon'|'terminator'|'bgstart'|'stop'|'restart') + SLING_COMMAND=$1 + ;; + *) + echo "Invalid option: $1" + usage + exit;; + esac + shift +done + +#-------------------------------------------------------------------------------- +if [ ! -d "$SLING_CONTEXT" ] ; then + mkdir -p "$SLING_CONTEXT" +fi +if [ ! -f "$SLING_CONTEXT/bin/${project.build.finalName}-standalone.jar" ] ; then + echo "Bad SLING_CONTEXT: $SLING_CONTEXT (bin/${project.build.finalName}-standalone.jar not found)" >&2 + exit 4 +fi +cd "$SLING_CONTEXT" + +case "$SLING_COMMAND" in + 'start') # test to be sure we can run, then bg + # the startup + use_stderr="YES" + + + if [ ! -d "$SLING_LOGDIR" ] ; then + mkdir -p "$SLING_LOGDIR" 2>/dev/null || \ + ( echo "mkdir failed for $SLING_LOGDIR" >&2 ; exit 2 ) + fi + for file in "$SLING_LOG" "$SLING_LOGDIR/sling.pid" "$SLING_LOGDIR/monitor.pid" ; do + if [ ! 
-w "$file" ] ; then + touch "$file" 2>/dev/null || \ + ( "Couldn't create file $file" >&2 ; exit 2 ) + fi + done + + if [ -n "$JAVA_HOME" -a -x "$JAVA_HOME/bin/java" ] ; then + SLING_JVM=$JAVA_HOME/bin/java + else + JAVA_HOME= + SLING_JVM="`which java 2>/dev/null`" + if [ -z "$SLING_JVM" ] ; then + for pfix in "/usr/local" "/opt" "/usr" ; do + for jvers in "java" "j2sdk" "j2sdk1.4" "java1.4" \ + "j2sdk1.3.1" "java1.3.1" "j2sdk1.3" "java1.3" ; do + if [ -x "$pfix/$jvers/bin/java" ] ; then + SLING_JVM="$pfix/$jvers/bin/java" + break 2 + fi + done + done + if [ -z "$SLING_JVM" ] ; then + err "Unable to locate java, please make sure java is installed and JAVA_HOME set" + exit 3 + fi + fi + fi + + # check if already running + SLING_PID=`cat "$SLING_LOGDIR/sling.pid" 2> /dev/null` + if [ -n "$SLING_PID" ]; then + if ps -p $SLING_PID > /dev/null; then + err "process $SLING_PID already running. please stop and try again." + exit 4 + else + warn "pid file $SLING_LOGDIR/sling.pid present, but process not running. "\ + "maybe unclean shutdown ?" 
+ rm -f "$SLING_LOGDIR/sling.pid" + SLING_PID= + fi + fi + + # assemble the startup param + SLING_JAVA_VERSION=`javaVersion` + if [ -n "$SLING_JVM_JAAS" ]; then + jvmOpts="$jvmOpts -Djava.security.auth.login.config=$SLING_JVM_JAAS_CONFIG" + fi + if [ -n "$SLING_JVM_VERBOSE_GC" ]; then + jvmOpts="$jvmOpts -verbose:gc" + fi + jvmOpts="$jvmOpts -Xms${SLING_JVM_HEAP_MIN}m" + jvmOpts="$jvmOpts -Xmx${SLING_JVM_HEAP_MAX}m" + jvmOpts="$jvmOpts -Djava.awt.headless=true" + + if [ -n "$SLING_JVM_DEBUG" ]; then + jvmOpts="$jvmOpts -Xdebug -Xnoagent -Djava.compiler=NONE \ +-Xrunjdwp:transport=dt_$SLING_JVM_DEBUG,address=$SLING_JVM_DEBUG_PORT,\ +server=y,suspend=${SLING_JVM_DEBUG_SUSPENDED}" + fi + if [ "$SLING_JVM_PROFILE" = yjp ]; then + if [ $SLING_JAVA_VERSION = "1.4" ]; then + jvmOpts="$jvmOpts -Xrunyjpagent" + else + jvmOpts="$jvmOpts -agentlib:yjpagent" + fi + fi + jvmOpts="$jvmOpts $SLING_JVM_OPTS" + + # assemble program arguments + if [ -n "$SLING_PORT" ]; then + slingOpts="$slingOpts -p $SLING_PORT" + fi + if [ -n "$SLING_INTERFACE" ]; then + slingOpts="$slingOpts -a $SLING_INTERFACE" + fi + if [ -n "$SLING_LOGLEVEL" ]; then + slingOpts="$slingOpts -l $SLING_LOGLEVEL" + fi + if [ -w "$SLING_LOG" ]; then + slingOpts="$slingOpts -f -" + fi + + # executable string + jvmExe="$SLING_JVM $jvmOpts -jar $SLING_CONTEXT/bin/${project.build.finalName}.jar $slingOpts" + export jvmExe + + if verbose; then + dump + echo "" + echo "Execute: $jvmExe" + echo "" + fi + if [ -n "$SLING_DRY" ]; then + echo "" + echo "--dry specified. not starting engine." + exit 0 + fi + + info "Using JVM found at $SLING_JVM" + + if [ -n "$SLING_FOREGROUND" ]; then + ulimit -n $SLING_MAX_OPEN_FILES + info "Starting $SLING_TITLE in foreground" + info "hit Ctrl-C to stop $SLING_TITLE" + printDebug + info "-----------------------------------------------------------" + $jvmExe | tee -a "$SLING_LOG" 2>&1 + exit 0 # in case of failure + else + info "Starting $SLING_TITLE in background..." 
+            printDebug
+            $0 psmon &
+            # wait until the monitor chain has actually written the pid file
+            # before reporting success; only then is the pid worth reading
+            # (previously SLING_PID was read before the wait, so it was
+            # usually empty or stale)
+            while [ -z "`cat "$SLING_LOGDIR/sling.pid" 2> /dev/null`" ]; do
+                sleep 1
+            done
+            SLING_PID=`cat "$SLING_LOGDIR/sling.pid"`
+            info "Started."
+        fi
+        exit 0
+        ;;
+#---------------------------------------------------------------------
+    'psmon') # sets up the process monitor
+        use_stderr="NO"
+        # psmon is only meaningful when spawned by 'start', which exports
+        # jvmExe; the original test [ -z "jvmExe" ] checked a literal string
+        # and was therefore always false - the missing $ is the fix
+        if [ -z "$jvmExe" ] ; then
+            usage
+            exit 0
+        fi
+        trap '' 1 # block HUP
+
+        # exit when TERM signal is received
+        trap 'info "$SLING_TITLE shutdown on TERM signal"; rm -f "$SLING_LOGDIR/monitor.pid" "$SLING_LOGDIR/sling.pid";exit 0' 15
+
+        echo $$ > "$SLING_LOGDIR/monitor.pid"
+
+        SHORT_RESTARTS=0
+        SLING_BGSTART=1
+        export SLING_BGSTART
+
+        while [ 0 ] ; do # forever
+            BEFORE=`date '+%Y%m%d%H%M%S'`
+            $0 terminator | $0 bgstart
+            AFTER=`date '+%Y%m%d%H%M%S'`
+
+            # Check for too many restarts < 10 seconds
+            # Okay that this will not work across the 59-to-0 second boundary
+            # because we are just trying to avoid a fast infinite loop caused
+            # by something like another process already listening on the port
+            #
+            RUNTIME=`expr $AFTER - $BEFORE`
+            if [ $RUNTIME -lt 10 ] ; then
+                SHORT_RESTARTS=`expr $SHORT_RESTARTS + 1`
+            else
+                SHORT_RESTARTS=0
+            fi
+            if [ $SHORT_RESTARTS -eq 3 ] ; then
+                err "Too many restarts - exiting!"
+ exit 1 + fi + done + ;; +#--------------------------------------------------------------------- + 'terminator') + use_stderr="NO" + if [ -z "$SLING_BGSTART" ] ; then + usage + exit + fi + # send QUIT to sling upon signal + trap 'slingpid=`/bin/cat "$SLING_LOGDIR/sling.pid" 2>/dev/null` ; if [ -n "$slingpid" ] ; then /bin/kill $slingpid > /dev/null 2>&1 ; fi; rm -f "$SLING_LOGDIR/terminator.pid"; exit 0' 3 6 15 + + echo $$ > "$SLING_LOGDIR/terminator.pid" + while [ 0 ] ; do # forever + sleep 5 + done + ;; +#--------------------------------------------------------------------- + 'bgstart') + use_stderr="NO" + if [ -z "$SLING_BGSTART" ] ; then + usage + exit + fi + ulimit -n $SLING_MAX_OPEN_FILES + echo $$ > "$SLING_LOGDIR/sling.pid" + exec $jvmExe >> "$SLING_LOG" 2>&1 + rm -f "$SLING_LOGDIR/sling.pid" + ;; +#--------------------------------------------------------------------- + 'stop') + use_stderr="YES" + termpid=`/bin/cat "$SLING_LOGDIR/terminator.pid" 2>/dev/null` + monitorpid=`/bin/cat "$SLING_LOGDIR/monitor.pid" 2>/dev/null` + slingpid=`/bin/cat "$SLING_LOGDIR/sling.pid" 2>/dev/null` + if [ -z "$termpid$monitorpid$slingpid" ]; then + echo "$SLING_TITLE not running." + exit 0; + fi + + printf "stopping $SLING_TITLE..." + if [ -n "$monitorpid" ] ; then + /bin/kill $monitorpid > /dev/null 2>&1 + fi + if [ -n "$termpid" ] ; then + /bin/kill $termpid > /dev/null 2>&1 + fi + + COUNTER=0 + while [ -f "$SLING_LOGDIR/sling.pid" ] && [ $COUNTER -lt 20 ]; do + printf "." + COUNTER=`expr $COUNTER + 1` + sleep 1 + done + # this last one is just in case the terminator already failed + slingpid=`/bin/cat "$SLING_LOGDIR/sling.pid" 2>/dev/null` + if ps -p $slingpid > /dev/null 2>&1 ; then + echo "still running. sending TERM signal to PID $slingpid" + /bin/kill $slingpid > /dev/null 2>&1 + else + echo "stopped." 
+ fi + + rm -f "$SLING_LOGDIR/monitor.pid" "$SLING_LOGDIR/terminator.pid" \ + "$SLING_LOGDIR/sling.pid" + ;; +#--------------------------------------------------------------------- + 'restart') + "$0" stop + "$0" start + ;; +#--------------------------------------------------------------------- + 'status') + slingpid=`/bin/cat "$SLING_LOGDIR/sling.pid" 2>/dev/null` + if ps -p $slingpid > /dev/null 2>&1 ; then + echo "$SLING_TITLE is running." + else + echo "$SLING_TITLE is stopped." + fi + ;; +#--------------------------------------------------------------------- + *) + usage + exit + ;; +esac + diff --git a/extensions/app/src/main/dist/unix/start b/extensions/app/src/main/dist/unix/start new file mode 100755 index 00000000..97a495a2 --- /dev/null +++ b/extensions/app/src/main/dist/unix/start @@ -0,0 +1,68 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# This script configures the start information for Sling +# +# The following environment variables may be used to override the default +# defined in the HTTP Service implementation: +# +# % SLING_PORT=56000 ./start +# +# this script is a quick wrapper for the main 'serverctl' script which +# has a lot more options. see +# +# serverctl --help +# +# for details. 
+# + +# override the java home for use by Apache Sling +# JAVA_HOME="/usr/java/j2sdk1.4.2_14" + +# override the port which Sling should bind to +SLING_PORT='8080' + +# override the interface which communique should listen to +# this setting is not currently supported +# SLING_INTERFACE='0.0.0.0' + +# override the initial log-level prior to reading the configuration +# this overwrites the setting in the sling.properties file and is overwritten +# by the LogService configuration. Use any string of DEBUG, INFO, WARN, +# ERROR and FATAL. Default is INFO. +#SLING_LOGLEVEL='INFO' + +# override the initial heap size in mb for use by Sling +#SLING_HEAP_MIN='128' + +# override the maximum heap size in mb for use by Sling +#SLING_HEAP_MAX='256' + +# ------------------------------------------------------------------------------ +# do not configure below this point +# ------------------------------------------------------------------------------ +export JAVA_HOME +export SLING_PORT +export SLING_INTERFACE +export SLING_LOGLEVEL +export SLING_HEAP_MIN +export SLING_HEAP_MAX + +exec "`dirname $0`/serverctl" start $* diff --git a/extensions/app/src/main/dist/unix/stop b/extensions/app/src/main/dist/unix/stop new file mode 100755 index 00000000..e8ebf9f8 --- /dev/null +++ b/extensions/app/src/main/dist/unix/stop @@ -0,0 +1,23 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# this script stops a running instance of Sling +# +exec "`dirname $0`/serverctl" stop diff --git a/extensions/app/src/main/java/org/sakaiproject/nakamura/app/BootStrapLogger.java b/extensions/app/src/main/java/org/sakaiproject/nakamura/app/BootStrapLogger.java new file mode 100644 index 00000000..b5e7bf85 --- /dev/null +++ b/extensions/app/src/main/java/org/sakaiproject/nakamura/app/BootStrapLogger.java @@ -0,0 +1,7 @@ +package org.sakaiproject.nakamura.app; + +public interface BootStrapLogger { + + void info(String message, Throwable t); + +} diff --git a/extensions/app/src/main/java/org/sakaiproject/nakamura/app/LaunchNakamura.java b/extensions/app/src/main/java/org/sakaiproject/nakamura/app/LaunchNakamura.java new file mode 100644 index 00000000..57df6b48 --- /dev/null +++ b/extensions/app/src/main/java/org/sakaiproject/nakamura/app/LaunchNakamura.java @@ -0,0 +1,392 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sakaiproject.nakamura.app; + +import java.awt.Color; +import java.awt.Desktop; +import java.awt.Dimension; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.io.StringWriter; +import java.io.Writer; +import java.net.HttpURLConnection; +import java.net.URISyntaxException; +import java.net.URL; + +import javax.swing.ImageIcon; +import javax.swing.JOptionPane; + +/** + * This is a simple Swing GUI to launch Nakamura (primarily for the benefit of Windows + * users) from a Java Web Start link. + *

+ * I actually developed this in Netbeans, due to it's support of Swing (and Eclipse's lack + * of) and copied the code into the app package. + * + * @author Chris Dunstall (cdunstall@csu.edu.au) + * @version 1.0 January 6 2011. + */ +public class LaunchNakamura extends javax.swing.JFrame { + + private static final long serialVersionUID = 8161160666368638463L; + public static final int APP_RUNNING = 1; + public static final int APP_NOT_RUNNING = 0; + private static String[] savedArgs; + private int runStatus = APP_NOT_RUNNING; // 0 for off, 1 for on. + private static final String localhostURL = "http://localhost:8080/dev/"; + + /** Creates new form LaunchNakamura */ + public LaunchNakamura() { + initComponents(); + + ImageIcon icon = createImageIcon("/sakaioae-icon.png", "SakaiOAE Logo"); + headingLabel.setIcon(icon); + + String disclaimer = ""; + try { + disclaimer = getLabelText("/readme.txt"); + } catch (IOException e) { + disclaimer = "Use at own risk."; + } + + disclaimerLabel.setText(disclaimer); + disclaimerLabel.setPreferredSize(new Dimension(1, 1)); + + browserButton.setEnabled(false); + } + + /** + * This method is called from within the constructor to initialize the form. + *

+ * Note: This code was generated by Netbeans. + */ + private void initComponents() { + + launchButton = new javax.swing.JButton(); + statusLabel = new javax.swing.JLabel(); + exitButton = new javax.swing.JButton(); + headingLabel = new javax.swing.JLabel(); + disclaimerLabel = new javax.swing.JLabel(); + browserButton = new javax.swing.JButton(); + + setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE); + setTitle("Launch Nakamura"); + setName("mainFrame"); // NOI18N + setResizable(false); + + launchButton.setFont(new java.awt.Font("Arial", 0, 13)); // NOI18N + launchButton.setText("Launch"); + launchButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + launchButtonActionPerformed(evt); + } + }); + + statusLabel.setFont(new java.awt.Font("Arial", 0, 12)); // NOI18N + statusLabel.setText("Nakamura is not running."); + + exitButton.setFont(new java.awt.Font("Arial", 0, 13)); // NOI18N + exitButton.setText("Exit"); + exitButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + exitButtonActionPerformed(evt); + } + }); + + headingLabel.setText("SakaiOAE icon"); + headingLabel.setBorder(javax.swing.BorderFactory.createEtchedBorder()); + + disclaimerLabel.setFont(new java.awt.Font("Arial", 0, 13)); // NOI18N + disclaimerLabel.setText("jLabel1"); + disclaimerLabel.setVerticalAlignment(javax.swing.SwingConstants.TOP); + disclaimerLabel.setAutoscrolls(true); + disclaimerLabel.setBorder(javax.swing.BorderFactory.createTitledBorder("Disclaimer")); + + browserButton.setText("Open Sakai OAE"); + browserButton.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + browserButtonActionPerformed(evt); + } + }); + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); + getContentPane().setLayout(layout); + 
layout.setHorizontalGroup(layout.createParallelGroup( + javax.swing.GroupLayout.Alignment.LEADING).addGroup( + layout + .createSequentialGroup() + .addGroup( + layout + .createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup( + layout + .createSequentialGroup() + .addGap(27, 27, 27) + .addComponent(launchButton) + .addGap(18, 18, 18) + .addComponent(statusLabel) + .addPreferredGap( + javax.swing.LayoutStyle.ComponentPlacement.RELATED, 162, + Short.MAX_VALUE) + .addComponent(browserButton) + .addPreferredGap( + javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(exitButton)) + .addGroup( + javax.swing.GroupLayout.Alignment.CENTER, + layout + .createSequentialGroup() + .addContainerGap() + .addComponent(headingLabel, + javax.swing.GroupLayout.PREFERRED_SIZE, 149, + javax.swing.GroupLayout.PREFERRED_SIZE) + .addGap(18, 18, 18) + .addComponent(disclaimerLabel, + javax.swing.GroupLayout.PREFERRED_SIZE, 493, + javax.swing.GroupLayout.PREFERRED_SIZE))) + .addContainerGap())); + + layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] { + exitButton, launchButton }); + + layout.setVerticalGroup(layout + .createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup( + javax.swing.GroupLayout.Alignment.TRAILING, + layout + .createSequentialGroup() + .addContainerGap() + .addGroup( + layout + .createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(disclaimerLabel, + javax.swing.GroupLayout.PREFERRED_SIZE, 215, + javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(headingLabel, + javax.swing.GroupLayout.PREFERRED_SIZE, 116, + javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGap(58, 58, 58) + .addGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) + .addComponent(browserButton).addComponent(exitButton)) + .addContainerGap()) + .addGroup( + layout + .createSequentialGroup() + .addGap(259, 259, 259) + .addGroup( + layout + 
.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(launchButton, + javax.swing.GroupLayout.PREFERRED_SIZE, 50, + javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(statusLabel)) + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))); + + pack(); + } + + /** + * This is the launch button action method. This method launches the Apache Sling + * bootloader and informs the user to wait before accessing it in a browser. + * + * @param evt + * The details of the Action event. + * @throws IOException + */ + private void launchButtonActionPerformed(java.awt.event.ActionEvent evt) { + // Launch Nakamura + if (runStatus == APP_NOT_RUNNING) { + System.setSecurityManager(null); + try { + NakamuraMain.main(savedArgs); + + // Update label + statusLabel.setText("Nakamura is starting..."); + + // Notify the user + JOptionPane.showMessageDialog(this, + "Nakamura has been started.\nPlease allow 30-60 seconds for it to be ready.", + "Information", JOptionPane.INFORMATION_MESSAGE); + + runStatus = APP_RUNNING; + isStartupFinished(); + } catch (IOException e) { + statusLabel.setText("Nakamura is startup failed " + e.getMessage()); + } + } else { + // Can't start it again... + // custom title, warning icon + JOptionPane.showMessageDialog(this, "Nakamura is already running.", "Warning", + JOptionPane.WARNING_MESSAGE); + } + } + + /** + * Pings the Apache Sling server URL every 5 seconds to see if it has finished booting. + * Once it receives an OK status, it enables the button to launch the browser and + * disables the launch Nakamura button. 
+ */ + private void isStartupFinished() { + boolean started = false; + try { + while (!started) { + if (exists(localhostURL)) + started = true; + Thread.sleep(5 * 1000); + } + } catch (InterruptedException e) { + e.printStackTrace(); + } + + if (started) { + statusLabel.setText("Nakamura is running."); + statusLabel.setForeground(Color.green); + launchButton.setEnabled(false); + browserButton.setEnabled(true); + } + } + + /** + * Pings the Apache Sling server URL, looking for an OK status. Returns true once that + * OK status is received. + * + * @param URLName + * The URL to ping. + * @return true if OK status is received back. False if not OK. + */ + public static boolean exists(String URLName) { + try { + HttpURLConnection.setFollowRedirects(false); + // note : you may also need + // HttpURLConnection.setInstanceFollowRedirects(false) + HttpURLConnection con = (HttpURLConnection) new URL(URLName).openConnection(); + con.setRequestMethod("HEAD"); + return (con.getResponseCode() == HttpURLConnection.HTTP_OK); + } catch (Exception e) { + e.printStackTrace(); + return false; + } + } + + /** + * Performs the action when the exit button is pressed, which is exit the program. + * + * @param evt + * The details of the event. + */ + private void exitButtonActionPerformed(java.awt.event.ActionEvent evt) { + // Kill Nakamura and the GUI app. + System.exit(0); + } + + /** + * Performs the action when the browser button is pressed, which is launch a web browser + * and browse to the server URL. + * + * @param evt + * The details of the event. + */ + private void browserButtonActionPerformed(java.awt.event.ActionEvent evt) { + try { + Desktop.getDesktop().browse(new URL(localhostURL).toURI()); + } catch (IOException e) { + System.err.println("IO Exception: " + e.getMessage()); + } catch (URISyntaxException e) { + System.err.println("URISyntaxException: " + e.getMessage()); + } + } + + /** + * Returns an ImageIcon, or null if the path was invalid. 
+ * + * @param path + * The path to the icon. + * @param description + * The description of the icon. + * @return the newly created ImageIcon or null. + */ + protected ImageIcon createImageIcon(String path, String description) { + java.net.URL imgURL = getClass().getResource(path); + if (imgURL != null) { + return new ImageIcon(imgURL, description); + } else { + System.err.println("Couldn't find file: " + path); + return null; + } + } + + /** + * Returns the full contents of a (assumed) text file for use in a label. + * + * @param path + * The path to the text file. + * @return The contents of the file as a String. + * @throws IOException + * thrown if the file is unable to be read. + */ + protected String getLabelText(String path) throws IOException { + InputStream is = this.getClass().getResourceAsStream(path); + if (is != null) { + Writer writer = new StringWriter(); + + char[] buffer = new char[1024]; + try { + Reader reader = new BufferedReader(new InputStreamReader(is, "UTF-8")); + int n; + while ((n = reader.read(buffer)) != -1) { + writer.write(buffer, 0, n); + } + } finally { + is.close(); + } + return writer.toString(); + } else { + System.err.println("Couldn't find file: " + path); + return null; + } + } + + /** + * The Main method which executes the program. 
+ * + * @param args + * the command line arguments + */ + public static void main(String args[]) { + savedArgs = args; + java.awt.EventQueue.invokeLater(new Runnable() { + + public void run() { + new LaunchNakamura().setVisible(true); + } + }); + } + + // Variables declaration - do not modify + private javax.swing.JButton browserButton; + private javax.swing.JLabel disclaimerLabel; + private javax.swing.JButton exitButton; + private javax.swing.JLabel headingLabel; + private javax.swing.JButton launchButton; + private javax.swing.JLabel statusLabel; + // End of variables declaration + +} diff --git a/extensions/app/src/main/java/org/sakaiproject/nakamura/app/NakamuraMain.java b/extensions/app/src/main/java/org/sakaiproject/nakamura/app/NakamuraMain.java new file mode 100644 index 00000000..0b698b9e --- /dev/null +++ b/extensions/app/src/main/java/org/sakaiproject/nakamura/app/NakamuraMain.java @@ -0,0 +1,312 @@ +package org.sakaiproject.nakamura.app; + +import org.apache.sling.launchpad.app.Main; +import org.apache.sling.launchpad.base.shared.SharedConstants; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintStream; +import java.net.MalformedURLException; +import java.net.URL; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +public class NakamuraMain { + + private static final String[] BUNDLE_SOURCE_LOCATIONS = new String[] { "SLING-INF/static", + "SLING-INF/home" }; + private static final String[] FS_DEST_LOCATIONS = new String[] { "sling/static", null }; + // The name of the environment variable to consult to find out + // about sling.home + private static final String ENV_SLING_HOME = "SLING_HOME"; + private static String slingHome; + private static Map parsedArgs; + + public static void main(String[] args) throws IOException { + if (checkLaunchDate(args)) { + 
// new jar check for new content + UnBundleStaticContent unBundleStaticContent = new UnBundleStaticContent( + new BootStrapLogger() { + + @Override + public void info(String message, Throwable t) { + NakamuraMain.info(message, t); + + } + + }); + // allow the command line to add mappings using --mappings source:dest,source:dest + String[] destLocations = FS_DEST_LOCATIONS; + String[] sourceLocations = BUNDLE_SOURCE_LOCATIONS; + destLocations[1] = slingHome; + String staticContentMappings = parsedArgs.get("mappings"); + if ( staticContentMappings != null ) { + String[] parts = staticContentMappings.split(","); + String[] tmpDestLocations = new String[destLocations.length+parts.length]; + String[] tmpSourceLocations = new String[sourceLocations.length+parts.length]; + System.arraycopy(destLocations, 0, tmpDestLocations, 0, destLocations.length); + System.arraycopy(sourceLocations, 0, tmpSourceLocations, 0, sourceLocations.length); + for ( int i = 0; i < parts.length; i++) { + String[] m = parts[i].split(":"); + tmpSourceLocations[i+sourceLocations.length] = m[0]; + tmpDestLocations[i+destLocations.length] = m[1]; + } + sourceLocations = tmpSourceLocations; + destLocations = tmpDestLocations; + } + unBundleStaticContent.extract(unBundleStaticContent.getClass(), + "resources/bundles/", sourceLocations, destLocations); + } + System.setSecurityManager(null); + Main.main(args); + } + + private static boolean checkLaunchDate(String[] args) throws IOException { + // Find the last modified of this jar + parsedArgs = parseCommandLine(args); + // Find the last modified when the jar was loaded. 
+ slingHome = getSlingHome(parsedArgs); + try { + String resource = NakamuraMain.class.getName().replace('.', '/') + + ".class"; + URL u = NakamuraMain.class.getClassLoader().getResource(resource); + String jarFilePath = u.getFile(); + jarFilePath = jarFilePath.substring(0, jarFilePath.length() + - resource.length() - 2); + u = new URL(jarFilePath); + jarFilePath = u.getFile(); + File jarFile = new File(jarFilePath); + info("Loading from " + jarFile, null); + long lastModified = jarFile.lastModified(); + + File slingHomeFile = new File(slingHome); + File loaderTimestamp = new File(slingHome, ".lauchpadLastModified"); + long launchpadLastModified = 0L; + if (loaderTimestamp.exists()) { + BufferedReader fr = null; + try { + fr = new BufferedReader(new FileReader(loaderTimestamp)); + launchpadLastModified = Long.parseLong(fr.readLine()); + } catch (NumberFormatException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } finally { + if (fr != null) { + try { + fr.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + } else { + info("No loader timestamp ", null); + } + + // if the jar is newer, then delete the bootstrap servialization + // file that will + // cause the contents of the jar to replace the contents on disk. 
+ + if (launchpadLastModified < lastModified) { + File bundleSer = new File(slingHomeFile, + "felix/bundle0/bootstrapinstaller.ser"); + if (bundleSer.exists()) { + info("Launcer Jar is newer than runtime image, removing bundle state, jar will reload ", + null); + bundleSer.delete(); + } else { + info("No runtime, will use contents of launcher jar", null); + } + slingHomeFile.mkdirs(); + FileWriter fw = new FileWriter(loaderTimestamp); + fw.write(String.valueOf(lastModified)); + fw.close(); + fw = null; + return true; + } else { + info("Runtime image, newer than launcher, using runtime image ", + null); + } + } catch (MalformedURLException e) { + info("Not launching from a jar ", null); + } + return false; + + } + + /** + * Define the sling.home parameter implementing the algorithme defined on + * the wiki page to find the setting according to this algorithm: + *

    + *
  1. Command line option -c
  2. + *
  3. System property sling.home
  4. + *
  5. Environment variable SLING_HOME
  6. + *
  7. Default value sling
  8. + *
+ * + * @param args + * The command line arguments + * @return The value to use for sling.home + */ + private static String getSlingHome(Map commandLine) { + String source = null; + + String slingHome = commandLine.get("c"); + if (slingHome != null) { + + source = "command line"; + + } else { + + slingHome = System.getProperty(SharedConstants.SLING_HOME); + if (slingHome != null) { + + source = "system property sling.home"; + + } else { + + slingHome = System.getenv(ENV_SLING_HOME); + if (slingHome != null) { + + source = "environment variable SLING_HOME"; + + } else { + + source = "default"; + slingHome = SharedConstants.SLING_HOME_DEFAULT; + + } + } + } + + info("Setting sling.home=" + slingHome + " (" + source + ")", null); + return slingHome; + } + + /** + * Parses the command line arguments into a map of strings indexed by + * strings. This method suppports single character option names only at the + * moment. Each pair of an option name and its value is stored into the map. + * If a single dash '-' character is encountered the rest of the command + * line are interpreted as option names and are stored in the map unmodified + * as entries with the same key and value. + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Command LineMapping
xx -> x
-y zy -> z
-yzy -> z
-y -zy -> y, z -> z
-y x - -z ay -> x, -z -> -z, a -> a
+ * + * @param args + * The command line to parse + * + * @return The map of command line options and their values + */ + static Map parseCommandLine(String[] args) { + Map commandLine = new HashMap(); + boolean readUnparsed = false; + for (int argc = 0; args != null && argc < args.length; argc++) { + String arg = args[argc]; + + if (readUnparsed) { + commandLine.put(arg, arg); + } else if (arg.startsWith("-")) { + if (arg.length() == 1) { + readUnparsed = true; + } else { + String key = String.valueOf(arg.charAt(1)); + if (arg.length() > 2) { + commandLine.put(key, arg.substring(2)); + } else { + argc++; + if (argc < args.length + && (args[argc].equals("-") || !args[argc] + .startsWith("-"))) { + commandLine.put(key, args[argc]); + } else { + commandLine.put(key, key); + argc--; + } + } + } + } else { + commandLine.put(arg, arg); + } + } + return commandLine; + } + + // ---------- logging + + // emit an informational message to standard out + static void info(String message, Throwable t) { + log(System.out, "*INFO*", message, t); + } + + // emit an error message to standard err + static void error(String message, Throwable t) { + log(System.err, "*ERROR*", message, t); + } + + private static final DateFormat fmt = new SimpleDateFormat( + "dd.MM.yyyy HH:mm:ss.SSS "); + + // helper method to format the message on the correct output channel + // the throwable if not-null is also prefixed line by line with the prefix + private static void log(PrintStream out, String prefix, String message, + Throwable t) { + + final StringBuilder linePrefixBuilder = new StringBuilder(); + synchronized (fmt) { + linePrefixBuilder.append(fmt.format(new Date())); + } + linePrefixBuilder.append(prefix); + linePrefixBuilder.append(" ["); + linePrefixBuilder.append(Thread.currentThread().getName()); + linePrefixBuilder.append("] "); + final String linePrefix = linePrefixBuilder.toString(); + + out.print(linePrefix); + out.println(message); + if (t != null) { + t.printStackTrace(new 
PrintStream(out) { + @Override + public void println(String x) { + synchronized (this) { + print(linePrefix); + super.println(x); + flush(); + } + } + }); + } + } + +} diff --git a/extensions/app/src/main/java/org/sakaiproject/nakamura/app/UnBundleStaticContent.java b/extensions/app/src/main/java/org/sakaiproject/nakamura/app/UnBundleStaticContent.java new file mode 100644 index 00000000..73f1377e --- /dev/null +++ b/extensions/app/src/main/java/org/sakaiproject/nakamura/app/UnBundleStaticContent.java @@ -0,0 +1,146 @@ +package org.sakaiproject.nakamura.app; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + +/** + * Extracts static content from an OSGi bootstrap jar + * + * @author ieb + * + */ +public class UnBundleStaticContent { + + private byte[] buffer = new byte[4096]; + private BootStrapLogger logger; + + public UnBundleStaticContent(BootStrapLogger bootStrapLogger) { + this.logger = bootStrapLogger; + } + + /** + * @param markerClass + * a class to identify the jar in which the content is to be + * loaded from, normally the bootstrap jar. + * @param bundlePath + * the path in the jar where OSGi bundles are contained. + * @param strings + * the path within the OSGi bundle where static content is + * located. + * @param strings2 + * the folder where that content will be unpacked. + * @throws MalformedURLException + * if the classpath element containing the marker class is not a + * jar. + * @throws IOException + * if the bootstrap jar file can't be opened. 
+ */ + public void extract(Class markerClass, String bundlePath, + String[] source, String[] dest) throws MalformedURLException, + IOException { + File tempFolder = File.createTempFile("unpack", "bundles"); + + tempFolder.delete(); + if (!tempFolder.mkdir()) { + throw new IOException( + "Unable to create working space at " + + tempFolder.getAbsolutePath() + + " (deleted a temp file but failed to recreate it as a directory, " + + "please report at https://github.com/ieb/sparsemapcontent )"); + } + try { + List unpackedBundles = unpackJarContents( + getContainingJarFile(markerClass), + new String[] { bundlePath }, + new String[] { tempFolder.getAbsolutePath() }); + for (File f : unpackedBundles) { + try { + unpackJarContents(f, source, dest); + } catch (IOException e) { + logger.info("Failed to Unpack " + f.getName(), e); + } + } + } finally { + deleteAll(tempFolder); + } + } + + private void deleteAll(File tempFolder) { + if (tempFolder.exists()) { + if (tempFolder.isDirectory()) { + for (File f : tempFolder.listFiles()) { + deleteAll(f); + } + } + tempFolder.delete(); + } + } + + private List unpackJarContents(File containingJarFile, + String[] source, String[] dest) throws IOException { + JarFile jf = new JarFile(containingJarFile); + List files = new ArrayList(); + for (Enumeration jee = jf.entries(); jee.hasMoreElements();) { + JarEntry je = jee.nextElement(); + String name = je.getName(); + for (int i = 0; i < source.length; i++) { + if (name.startsWith(source[i]) && !je.isDirectory()) { + File target = new File(dest[i], name.substring(source[i] + .length())); + if (!target.exists() + || target.lastModified() < je.getTime() + || je.getTime() < 0) { + target.getParentFile().mkdirs(); + OutputStream out = new FileOutputStream(target); + InputStream in = jf.getInputStream(je); + copy(in, out); + out.close(); + in.close(); + logger.info("Updated " + target.getAbsoluteFile(), + null); + } else { + logger.info("No Update to " + target.getAbsoluteFile(), + null); + + 
} + files.add(target); + } + } + } + return files; + } + + private void copy(InputStream in, OutputStream out) throws IOException { + int i = 0; + while ((i = in.read(buffer)) >= 0) { + if (i == 0) { + Thread.yield(); + } else { + out.write(buffer, 0, i); + } + } + } + + private File getContainingJarFile(Class clazz) + throws MalformedURLException { + String resource = clazz.getName().replace('.', '/') + ".class"; + URL u = clazz.getClassLoader().getResource(resource); + String jarFilePath = u.getFile(); + jarFilePath = jarFilePath.substring(0, + jarFilePath.length() - resource.length() - 2); + u = new URL(jarFilePath); + jarFilePath = u.getFile(); + return new File(jarFilePath); + } + +} diff --git a/extensions/app/src/main/resources/jre-1.5.properties b/extensions/app/src/main/resources/jre-1.5.properties new file mode 100644 index 00000000..fc9e1254 --- /dev/null +++ b/extensions/app/src/main/resources/jre-1.5.properties @@ -0,0 +1,143 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# The file contains the Java 1.5 platform packages used as system packages +# for the current platform. 
+jre.mods.doc = The JRE Settings have javax.trasacton removed for Sakai to use JTA 1.1 +jre-1.5= \ + javax.accessibility,\ + javax.activity,\ + javax.crypto,\ + javax.crypto.interfaces,\ + javax.crypto.spec,\ + javax.imageio,\ + javax.imageio.event,\ + javax.imageio.metadata,\ + javax.imageio.plugins.bmp,\ + javax.imageio.plugins.jpeg,\ + javax.imageio.spi,\ + javax.imageio.stream,\ + javax.management,\ + javax.management.loading,\ + javax.management.modelmbean,\ + javax.management.monitor,\ + javax.management.openmbean,\ + javax.management.relation,\ + javax.management.remote,\ + javax.management.remote.rmi,\ + javax.management.timer,\ + javax.naming,\ + javax.naming.directory,\ + javax.naming.event,\ + javax.naming.ldap,\ + javax.naming.spi,\ + javax.net,\ + javax.net.ssl,\ + javax.print,\ + javax.print.attribute,\ + javax.print.attribute.standard,\ + javax.print.event,\ + javax.rmi,\ + javax.rmi.CORBA,\ + javax.rmi.ssl,\ + javax.security.auth,\ + javax.security.auth.callback,\ + javax.security.auth.kerberos,\ + javax.security.auth.login,\ + javax.security.auth.spi,\ + javax.security.auth.x500,\ + javax.security.cert,\ + javax.security.sasl,\ + javax.sound.midi,\ + javax.sound.midi.spi,\ + javax.sound.sampled,\ + javax.sound.sampled.spi,\ + javax.sql,\ + javax.sql.rowset,\ + javax.sql.rowset.serial,\ + javax.sql.rowset.spi,\ + javax.swing,\ + javax.swing.border,\ + javax.swing.colorchooser,\ + javax.swing.event,\ + javax.swing.filechooser,\ + javax.swing.plaf,\ + javax.swing.plaf.basic,\ + javax.swing.plaf.metal,\ + javax.swing.plaf.multi,\ + javax.swing.plaf.synth,\ + javax.swing.table,\ + javax.swing.text,\ + javax.swing.text.html,\ + javax.swing.text.html.parser,\ + javax.swing.text.rtf,\ + javax.swing.tree,\ + javax.swing.undo,\ + javax.xml,\ + javax.xml.datatype,\ + javax.xml.namespace,\ + javax.xml.parsers,\ + javax.xml.transform,\ + javax.xml.transform.dom,\ + javax.xml.transform.sax,\ + javax.xml.transform.stream,\ + javax.xml.validation,\ + 
javax.xml.xpath,\ + org.ietf.jgss,\ + org.omg.CORBA,\ + org.omg.CORBA_2_3,\ + org.omg.CORBA_2_3.portable,\ + org.omg.CORBA.DynAnyPackage,\ + org.omg.CORBA.ORBPackage,\ + org.omg.CORBA.portable,\ + org.omg.CORBA.TypeCodePackage,\ + org.omg.CosNaming,\ + org.omg.CosNaming.NamingContextExtPackage,\ + org.omg.CosNaming.NamingContextPackage,\ + org.omg.Dynamic,\ + org.omg.DynamicAny,\ + org.omg.DynamicAny.DynAnyFactoryPackage,\ + org.omg.DynamicAny.DynAnyPackage,\ + org.omg.IOP,\ + org.omg.IOP.CodecFactoryPackage,\ + org.omg.IOP.CodecPackage,\ + org.omg.Messaging,\ + org.omg.PortableInterceptor,\ + org.omg.PortableInterceptor.ORBInitInfoPackage,\ + org.omg.PortableServer,\ + org.omg.PortableServer.CurrentPackage,\ + org.omg.PortableServer.POAManagerPackage,\ + org.omg.PortableServer.POAPackage,\ + org.omg.PortableServer.portable,\ + org.omg.PortableServer.ServantLocatorPackage,\ + org.omg.SendingContext,\ + org.omg.stub.java.rmi,\ + org.w3c.dom,\ + org.w3c.dom.bootstrap,\ + org.w3c.dom.css,\ + org.w3c.dom.events,\ + org.w3c.dom.html,\ + org.w3c.dom.ls,\ + org.w3c.dom.ranges,\ + org.w3c.dom.stylesheets,\ + org.w3c.dom.traversal,\ + org.w3c.dom.views,\ + org.xml.sax,\ + org.xml.sax.ext,\ + org.xml.sax.helpers diff --git a/extensions/app/src/main/resources/jre-1.6.properties b/extensions/app/src/main/resources/jre-1.6.properties new file mode 100644 index 00000000..550d02c1 --- /dev/null +++ b/extensions/app/src/main/resources/jre-1.6.properties @@ -0,0 +1,180 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# The file contains the Java 1.6 platform packages used as system packages +# for the current platform. +jre.mods.doc = The JRE Settings have javax.trasacton removed for Sakai to use JTA 1.1 +jre-1.6= \ + javax.accessibility,\ + javax.activation,\ + javax.activity,\ + javax.annotation,\ + javax.annotation.processing,\ + javax.crypto,\ + javax.crypto.interfaces,\ + javax.crypto.spec,\ + javax.imageio,\ + javax.imageio.event,\ + javax.imageio.metadata,\ + javax.imageio.plugins.bmp,\ + javax.imageio.plugins.jpeg,\ + javax.imageio.spi,\ + javax.imageio.stream,\ + javax.jws,\ + javax.jws.soap,\ + javax.lang.model,\ + javax.lang.model.element,\ + javax.lang.model.type,\ + javax.lang.model.util,\ + javax.management,\ + javax.management.loading,\ + javax.management.modelmbean,\ + javax.management.monitor,\ + javax.management.openmbean,\ + javax.management.relation,\ + javax.management.remote,\ + javax.management.remote.rmi,\ + javax.management.timer,\ + javax.naming,\ + javax.naming.directory,\ + javax.naming.event,\ + javax.naming.ldap,\ + javax.naming.spi,\ + javax.net,\ + javax.net.ssl,\ + javax.print,\ + javax.print.attribute,\ + javax.print.attribute.standard,\ + javax.print.event,\ + javax.rmi,\ + javax.rmi.CORBA,\ + javax.rmi.ssl,\ + javax.script,\ + javax.security.auth,\ + javax.security.auth.callback,\ + javax.security.auth.kerberos,\ + javax.security.auth.login,\ + javax.security.auth.spi,\ + javax.security.auth.x500,\ + javax.security.cert,\ + javax.security.sasl,\ + javax.smartcardio,\ + javax.sound.midi,\ + javax.sound.midi.spi,\ + 
javax.sound.sampled,\ + javax.sound.sampled.spi,\ + javax.sql,\ + javax.sql.rowset,\ + javax.sql.rowset.serial,\ + javax.sql.rowset.spi,\ + javax.swing,\ + javax.swing.border,\ + javax.swing.colorchooser,\ + javax.swing.event,\ + javax.swing.filechooser,\ + javax.swing.plaf,\ + javax.swing.plaf.basic,\ + javax.swing.plaf.metal,\ + javax.swing.plaf.multi,\ + javax.swing.plaf.synth,\ + javax.swing.table,\ + javax.swing.text,\ + javax.swing.text.html,\ + javax.swing.text.html.parser,\ + javax.swing.text.rtf,\ + javax.swing.tree,\ + javax.swing.undo,\ + javax.tools,\ + javax.xml,\ + javax.xml.bind,\ + javax.xml.bind.annotation,\ + javax.xml.bind.annotation.adapters,\ + javax.xml.bind.attachment,\ + javax.xml.bind.helpers,\ + javax.xml.bind.util,\ + javax.xml.crypto,\ + javax.xml.crypto.dom,\ + javax.xml.crypto.dsig,\ + javax.xml.crypto.dsig.dom,\ + javax.xml.crypto.dsig.keyinfo,\ + javax.xml.crypto.dsig.spec,\ + javax.xml.datatype,\ + javax.xml.namespace,\ + javax.xml.parsers,\ + javax.xml.soap,\ + javax.xml.stream,\ + javax.xml.stream.events,\ + javax.xml.stream.util,\ + javax.xml.transform,\ + javax.xml.transform.dom,\ + javax.xml.transform.sax,\ + javax.xml.transform.stax,\ + javax.xml.transform.stream,\ + javax.xml.validation,\ + javax.xml.ws,\ + javax.xml.ws.handler,\ + javax.xml.ws.handler.soap,\ + javax.xml.ws.http,\ + javax.xml.ws.soap,\ + javax.xml.ws.spi,\ + javax.xml.xpath,\ + org.ietf.jgss,\ + org.omg.CORBA,\ + org.omg.CORBA_2_3,\ + org.omg.CORBA_2_3.portable,\ + org.omg.CORBA.DynAnyPackage,\ + org.omg.CORBA.ORBPackage,\ + org.omg.CORBA.portable,\ + org.omg.CORBA.TypeCodePackage,\ + org.omg.CosNaming,\ + org.omg.CosNaming.NamingContextExtPackage,\ + org.omg.CosNaming.NamingContextPackage,\ + org.omg.Dynamic,\ + org.omg.DynamicAny,\ + org.omg.DynamicAny.DynAnyFactoryPackage,\ + org.omg.DynamicAny.DynAnyPackage,\ + org.omg.IOP,\ + org.omg.IOP.CodecFactoryPackage,\ + org.omg.IOP.CodecPackage,\ + org.omg.Messaging,\ + org.omg.PortableInterceptor,\ + 
org.omg.PortableInterceptor.ORBInitInfoPackage,\ + org.omg.PortableServer,\ + org.omg.PortableServer.CurrentPackage,\ + org.omg.PortableServer.POAManagerPackage,\ + org.omg.PortableServer.POAPackage,\ + org.omg.PortableServer.portable,\ + org.omg.PortableServer.ServantLocatorPackage,\ + org.omg.SendingContext,\ + org.omg.stub.java.rmi,\ + org.omg.stub.javax.management.remote.rmi,\ + org.w3c.dom,\ + org.w3c.dom.bootstrap,\ + org.w3c.dom.css,\ + org.w3c.dom.events,\ + org.w3c.dom.html,\ + org.w3c.dom.ls,\ + org.w3c.dom.ranges,\ + org.w3c.dom.stylesheets,\ + org.w3c.dom.traversal,\ + org.w3c.dom.views,\ + org.w3c.dom.xpath,\ + org.xml.sax,\ + org.xml.sax.ext,\ + org.xml.sax.helpers diff --git a/extensions/app/src/main/resources/readme.txt b/extensions/app/src/main/resources/readme.txt new file mode 100644 index 00000000..5c1792ca --- /dev/null +++ b/extensions/app/src/main/resources/readme.txt @@ -0,0 +1,3 @@ + +This pre-release is intended for review and to support the development goals of the project; it is not intended for use in production. It is NOT a scalable platform for course management in most colleges or universities. + diff --git a/extensions/app/src/main/resources/sakaioae-icon.png b/extensions/app/src/main/resources/sakaioae-icon.png new file mode 100644 index 00000000..791905b1 Binary files /dev/null and b/extensions/app/src/main/resources/sakaioae-icon.png differ diff --git a/extensions/app/src/main/resources/sling_install.properties b/extensions/app/src/main/resources/sling_install.properties new file mode 100644 index 00000000..315719bf --- /dev/null +++ b/extensions/app/src/main/resources/sling_install.properties @@ -0,0 +1,10 @@ +# This is set here so that it can be overridden on the command line. 
+# By default it wont be loaded since on first startup nakamura.properties wont exist in ${sling.home} +# The way to load a custom set of properties is to use -Dsling.include.nakamura=/absolute/path/to/nakamura.properties +# Which will override this location. +# NB, that if the properties are required on evey startup they must be either provided on the command line every time +# or added to ${sling.home}/nakamura.properties after the system has started. +sling.include.nakamura = nakamura.properties +org.apache.felix.eventadmin.Timeout=0 +sling.bootdelegation.profiling.eclipse=org.eclipse.tptp.martini, org.eclipse.tptp.martini.* +sling.bootdelegation.profiling.netbeans=org.netbeans.lib.profiler.server, org.netbeans.lib.profiler.server.* diff --git a/extensions/app/static/me.json b/extensions/app/static/me.json new file mode 100644 index 00000000..0e59f036 --- /dev/null +++ b/extensions/app/static/me.json @@ -0,0 +1 @@ +{"user":{"anon":true,"subjects":[],"superUser":false},"profile":{"basic":{"access":"everybody","elements":{"lastName":{"value":"User"},"email":{"value":"anon@sakai.invalid"},"firstName":{"value":"Anonymous"}}},"rep:userId":"anonymous"},"messages":{"unread":0},"contacts":{},"groups":[]} \ No newline at end of file diff --git a/extensions/app/static/tags/directory.tagged.json b/extensions/app/static/tags/directory.tagged.json new file mode 100644 index 00000000..685cc09e --- /dev/null +++ b/extensions/app/static/tags/directory.tagged.json @@ -0,0 +1 @@ 
+{"biologicalsciences":{"jcr:path":"/tags/directory/biologicalsciences","jcr:name":"biologicalsciences","sling:resourceType":"sakai/tag","jcr:createdBy":"johnfking2","sakai:tag-name":"directory/biologicalsciences","jcr:created":"2011-10-07T17:04:41+00:00","jcr:primaryType":"sling:Folder","content":{}},"mathematicalandcomputersciences":{"jcr:path":"/tags/directory/mathematicalandcomputersciences","jcr:name":"mathematicalandcomputersciences","sling:resourceType":"sakai/tag","jcr:createdBy":"johnfking2","sakai:tag-name":"directory/mathematicalandcomputersciences","jcr:created":"2011-10-07T17:04:41+00:00","jcr:primaryType":"sling:Folder","content":{}},"linguisticsclassicsandrelatedsubjects":{"jcr:path":"/tags/directory/linguisticsclassicsandrelatedsubjects","jcr:name":"linguisticsclassicsandrelatedsubjects","jcr:createdBy":"johnfking2","jcr:created":"2011-10-07T17:04:41+00:00","jcr:primaryType":"sling:Folder","content":{}},"historicalandphilosophicalstudies":{"jcr:path":"/tags/directory/historicalandphilosophicalstudies","jcr:name":"historicalandphilosophicalstudies","jcr:createdBy":"johnfking2","jcr:created":"2011-10-07T17:04:41+00:00","jcr:primaryType":"sling:Folder","content":{}},"creativeartsanddesign":{"jcr:path":"/tags/directory/creativeartsanddesign","jcr:name":"creativeartsanddesign","sling:resourceType":"sakai/tag","sakai:tag-name":"directory/creativeartsanddesign","jcr:createdBy":"johnfking2","sakai:tag-count":1,"jcr:created":"2011-10-07T17:04:41+00:00","jcr:primaryType":"sling:Folder","content":{"_lastModifiedBy":"john1","sakai:preview-type":"video","sakai:permissions":"public","_mimeType":"x-sakai/link","_createdBy":"admin","sakai:preview-avatar":"http://img.youtube.com/vi/YnSBWvXxtwQ/0.jpg","sakai:preview-url":"http://www.youtube.com/watch?v=YnSBWvXxtwQ","sakai:tag-uuid":["7a14bcca-bd45-4c61-ac93-9645abb04949"],"_path":"ij3cAiUGq","sakai:description":"","sakai:tags":["directory/creativeartsanddesign"],"sling:resourceType":"sakai/pooled-content","sakai:poole
d-content-manager":["john1"],"_created":1318007437505,"sakai:pooled-content-viewer":["anonymous","bugbashtest","everyone"],"sakai:pool-content-created-for":"john1","_id":"Rr4zEPEHEeCa7Flirf_xsA+","sakai:pooled-content-file-name":"Online eBooks","_lastModified":1318008041011,"sakai:pooled-content-url":"http://www.youtube.com/watch?v=YnSBWvXxtwQ","sakai:copyright":"creativecommons","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-07T17:20:41+00:00","jcr:mimeType":"x-sakai/link","jcr:data":0}}} \ No newline at end of file diff --git a/extensions/app/static/test.html b/extensions/app/static/test.html new file mode 100644 index 00000000..0f060282 --- /dev/null +++ b/extensions/app/static/test.html @@ -0,0 +1 @@ +testing. diff --git a/extensions/app/static/ui b/extensions/app/static/ui new file mode 120000 index 00000000..9a557023 --- /dev/null +++ b/extensions/app/static/ui @@ -0,0 +1 @@ +/Users/ieb/timefields/iebux \ No newline at end of file diff --git a/extensions/app/static/var/search/activity/all.json b/extensions/app/static/var/search/activity/all.json new file mode 100644 index 00000000..f6ff5c4c --- /dev/null +++ b/extensions/app/static/var/search/activity/all.json @@ -0,0 +1 @@ +{"items":12,"results":[{"_lastModifiedBy":"jsmith","sakai:permissions":"public","_mimeType":"x-sakai/document","_createdBy":"admin","_path":"iOzTWedOwb","sakai:description":"Nerts r us","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-viewer":["anonymous","everyone"],"sakai:pooled-content-manager":["jsmith"],"_created":1318873024310,"sakai:pool-content-created-for":"jsmith","_id":"oK7tYPjmEeCa7Flirf_xsA+","structure0":"{\"page1\":{\"_ref\":\"id4599079\",\"_order\":0,\"_title\":\"Page Title 1\",\"main\":{\"_ref\":\"id4599079\",\"_order\":0,\"_title\":\"Page Title 
1\",\"_poolpath\":\"/p/iOzTWedOwb\",\"_childCount\":0,\"_id\":\"main\",\"_elements\":[]},\"_canEdit\":true,\"_canSubedit\":true,\"_poolpath\":\"/p/iOzTWedOwb\",\"_childCount\":1,\"_id\":\"page1\",\"_elements\":[{\"_ref\":\"id4599079\",\"_order\":0,\"_title\":\"Page Title 1\",\"_poolpath\":\"/p/iOzTWedOwb\",\"_childCount\":0,\"_id\":\"main\",\"_elements\":[]}]},\"_childCount\":1,\"id9027443\":{\"_ref\":\"id9027443\",\"_title\":\"Untitled Page\",\"_order\":1,\"_canSubedit\":true,\"_canEdit\":true,\"_poolpath\":\"/p/iOzTWedOwb\",\"main\":{\"_ref\":\"id9027443\",\"_order\":0,\"_title\":\"Untitled Page\",\"_childCount\":0,\"_canSubedit\":true,\"_canEdit\":true,\"_poolpath\":\"/p/iOzTWedOwb\",\"_id\":\"main\",\"_elements\":[]},\"_childCount\":1,\"_id\":\"id9027443\",\"_elements\":[{\"_ref\":\"id9027443\",\"_order\":0,\"_title\":\"Untitled Page\",\"_childCount\":0,\"_canSubedit\":true,\"_canEdit\":true,\"_poolpath\":\"/p/iOzTWedOwb\",\"_id\":\"main\",\"_elements\":[]}]}}","sakai:pooled-content-file-name":"Nerts","_lastModified":1318873038334,"sakai:copyright":"creativecommons","sakai:activity-source":"iOzTWedOwb","sakai:activity-type":"pooled 
content","sakai:activity-appid":"default","sakai:activity-actor":"jsmith","sakai:activityMessage":"UPDATED_CONTENT","who":{"hash":"jsmith","basic":{"access":"everybody","elements":{"lastName":{"value":"Smith"},"email":{"value":"jsmith@example.com"},"firstName":{"value":"John"}}},"rep:userId":"jsmith","userid":"jsmith","counts":{"contactsCount":0,"membershipsCount":0,"contentCount":1,"countLastUpdate":1318904551549},"sakai:excludeSearch":false}},{"sling:resourceType":"sakai/user-home","_created":1318872946275,"_id":"ciu7MPjmEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_lastModified":1318872946275,"_createdBy":"admin","_path":"a:jsmith","sakai:activity-source":"a:jsmith","sakai:activity-type":"user","sakai:activity-appid":"default","sakai:activity-actor":"admin","sakai:activityMessage":"USER_CREATED","who":{"hash":"admin","basic":{"access":"everybody","elements":{"lastName":{"value":"User"},"email":{"value":"admin@sakai.invalid"},"firstName":{"value":"Admin"}}},"rep:userId":"admin","userid":"admin","counts":{},"sakai:excludeSearch":false}},{"_lastModifiedBy":"username01","sakai:permissions":"public","_mimeType":"x-sakai/document","_createdBy":"admin","_path":"iOTGP34WGq","sakai:description":"","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-viewer":["anonymous","public-group-member","everyone"],"sakai:pooled-content-manager":["username01","public-group-manager"],"_created":1318866739627,"sakai:pool-content-created-for":"username01","_id":"_rhnsPjXEeCa7Flirf_xsA+","sakai:pooled-content-file-name":"Participants","structure0":"{\"participants\":{\"_ref\":\"id49119302\",\"_order\":0,\"_title\":\"Participants\",\"_nonEditable\":true,\"main\":{\"_ref\":\"id49119302\",\"_order\":0,\"_nonEditable\":true,\"_title\":\"Participants\"}}}","_lastModified":1318866740189,"sakai:copyright":"creativecommons","sakai:activity-source":"iOTGP34WGq","sakai:activity-type":"pooled 
content","sakai:activity-appid":"default","sakai:activity-actor":"username01","sakai:activityMessage":"UPDATED_CONTENT","who":{"hash":"username01","basic":{"access":"everybody","elements":{"lastName":{"value":"Lastname01"},"email":{"value":"asd38@caret.cam.ac.uk"},"firstName":{"value":"firstname01"}}},"rep:userId":"username01","userid":"username01","counts":{"contactsCount":1,"membershipsCount":3,"contentCount":8,"countLastUpdate":1318904551544},"sakai:excludeSearch":false}},{"_lastModifiedBy":"username01","sakai:permissions":"public","_mimeType":"x-sakai/document","_createdBy":"admin","_path":"iOTGdynYaa","sakai:description":"","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-viewer":["anonymous","public-group-member","everyone"],"sakai:pooled-content-manager":["username01","public-group-manager"],"_created":1318866739619,"sakai:pool-content-created-for":"username01","_id":"_rcvMPjXEeCa7Flirf_xsA+","sakai:pooled-content-file-name":"Library","structure0":"{\"library\":{\"_ref\":\"id49119300\",\"_order\":0,\"_nonEditable\":true,\"_title\":\"Library\",\"main\":{\"_ref\":\"id49119300\",\"_order\":0,\"_nonEditable\":true,\"_title\":\"Library\"}}}","_lastModified":1318866740172,"sakai:copyright":"creativecommons","sakai:activity-source":"iOTGdynYaa","sakai:activity-type":"pooled 
content","sakai:activity-appid":"default","sakai:activity-actor":"username01","sakai:activityMessage":"UPDATED_CONTENT","who":{"hash":"username01","basic":{"access":"everybody","elements":{"lastName":{"value":"Lastname01"},"email":{"value":"asd38@caret.cam.ac.uk"},"firstName":{"value":"firstname01"}}},"rep:userId":"username01","userid":"username01","counts":{"contactsCount":1,"membershipsCount":3,"contentCount":8,"countLastUpdate":1318904551544},"sakai:excludeSearch":false}},{"sling:resourceType":"sakai/group-home","_created":1318866738748,"_id":"_jJHwPjXEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_lastModified":1318866738748,"_createdBy":"admin","_path":"a:public-group","sakai:activity-source":"a:public-group","sakai:activity-type":"group","sakai:activity-appid":"default","sakai:activity-actor":"admin","sakai:activityMessage":"GROUP_CREATED","who":{"hash":"admin","basic":{"access":"everybody","elements":{"lastName":{"value":"User"},"email":{"value":"admin@sakai.invalid"},"firstName":{"value":"Admin"}}},"rep:userId":"admin","userid":"admin","counts":{},"sakai:excludeSearch":false},"profile":{"sakai:category":"group","sakai:group-description":null,"sakai:group-id":"public-group","createdBy":"admin","lastModified":1318904551532,"sakai:group-title":"public 
group","created":1318866738738,"basic":{"access":"everybody","elements":{"lastName":{"value":"unknown"},"email":{"value":"unknown"},"firstName":{"value":"unknown"}}},"lastModifiedBy":"admin","groupid":"public-group","counts":{"membershipsCount":0,"contentCount":0,"membersCount":1,"countLastUpdate":1318904551532},"sakai:excludeSearch":false}},{"sling:resourceType":"sakai/group-home","_created":1318866738484,"_id":"_gn_QPjXEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_lastModified":1318866738484,"_createdBy":"admin","_path":"a:public-group-member","sakai:activity-source":"a:public-group-member","sakai:activity-type":"group","sakai:activity-appid":"default","sakai:activity-actor":"admin","sakai:activityMessage":"GROUP_CREATED","who":{"hash":"admin","basic":{"access":"everybody","elements":{"lastName":{"value":"User"},"email":{"value":"admin@sakai.invalid"},"firstName":{"value":"Admin"}}},"rep:userId":"admin","userid":"admin","counts":{},"sakai:excludeSearch":false},"profile":{"sakai:category":null,"sakai:group-description":null,"sakai:group-id":"public-group-member","createdBy":"admin","lastModified":1318866740557,"sakai:group-title":"public group 
(Members)","created":1318866738472,"basic":{"access":"everybody","elements":{"lastName":{"value":"unknown"},"email":{"value":"unknown"},"firstName":{"value":"unknown"}}},"lastModifiedBy":"username01","groupid":"public-group-member","counts":{"membershipsCount":1,"contentCount":2},"sakai:excludeSearch":"true"}},{"sling:resourceType":"sakai/group-home","_created":1318866738204,"_id":"_d9FwPjXEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_lastModified":1318866738204,"_createdBy":"admin","_path":"a:public-group-manager","sakai:activity-source":"a:public-group-manager","sakai:activity-type":"group","sakai:activity-appid":"default","sakai:activity-actor":"admin","sakai:activityMessage":"GROUP_CREATED","who":{"hash":"admin","basic":{"access":"everybody","elements":{"lastName":{"value":"User"},"email":{"value":"admin@sakai.invalid"},"firstName":{"value":"Admin"}}},"rep:userId":"admin","userid":"admin","counts":{},"sakai:excludeSearch":false},"profile":{"sakai:category":null,"sakai:group-description":null,"sakai:group-id":"public-group-manager","createdBy":"admin","lastModified":1318866740568,"sakai:group-title":"public group 
(Managers)","created":1318866738191,"basic":{"access":"everybody","elements":{"lastName":{"value":"unknown"},"email":{"value":"unknown"},"firstName":{"value":"unknown"}}},"lastModifiedBy":"username01","groupid":"public-group-manager","counts":{"membershipsCount":1,"contentCount":4},"sakai:excludeSearch":"true"}},{"_previousBlockId":"uu8KwPjXEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"uu2rMPjXEeCa7Flirf_xsA+","_path":"iOOmojoOaa","sakai:fileextension":".jpg","_blockId":"uu8KwPjXEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"_id":"uzALYfjXEeCa7Flirf_xsA+","_bodyCreatedBy":"admin","sakai:pool-content-created-for":"username01","sakai:pooled-content-file-name":"scottishwildcat.jpg","_bodyCreated":1318866625901,"sakai:copyright":"creativecommons","_length":105823,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"image/jpeg","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"uzALYPjXEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["username01"],"_created":1318866625891,"_bodyLastModified":1318866625901,"_lastModified":1318866645556,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/BG/y2/qf/BGy2qfXufsH7KLWNfavIc_c-1HI","sakai:activity-source":"iOOmojoOaa","sakai:activity-type":"pooled 
content","sakai:activity-appid":"default","sakai:activity-actor":"admin","sakai:activityMessage":"UPDATED_FILE","who":{"hash":"admin","basic":{"access":"everybody","elements":{"lastName":{"value":"User"},"email":{"value":"admin@sakai.invalid"},"firstName":{"value":"Admin"}}},"rep:userId":"admin","userid":"admin","counts":{},"sakai:excludeSearch":false}},{"_previousBlockId":"uu8KwPjXEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"uu2rMPjXEeCa7Flirf_xsA+","_path":"iOOmojoOaa","sakai:fileextension":".jpg","_blockId":"uu8KwPjXEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"_id":"uzALYfjXEeCa7Flirf_xsA+","_bodyCreatedBy":"admin","sakai:pool-content-created-for":"username01","sakai:pooled-content-file-name":"scottishwildcat.jpg","_bodyCreated":1318866625901,"sakai:copyright":"creativecommons","_length":105823,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"image/jpeg","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"uzALYPjXEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["username01"],"_created":1318866625891,"_bodyLastModified":1318866625901,"_lastModified":1318866645556,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/BG/y2/qf/BGy2qfXufsH7KLWNfavIc_c-1HI","sakai:activity-source":"iOOmojoOaa","sakai:activity-type":"pooled 
content","sakai:activity-appid":"default","sakai:activity-actor":"admin","sakai:activityMessage":"UPDATED_FILE","who":{"hash":"admin","basic":{"access":"everybody","elements":{"lastName":{"value":"User"},"email":{"value":"admin@sakai.invalid"},"firstName":{"value":"Admin"}}},"rep:userId":"admin","userid":"admin","counts":{},"sakai:excludeSearch":false}},{"_previousBlockId":"uu8KwPjXEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"uu2rMPjXEeCa7Flirf_xsA+","_path":"iOOmojoOaa","sakai:fileextension":".jpg","_blockId":"uu8KwPjXEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"_id":"uzALYfjXEeCa7Flirf_xsA+","_bodyCreatedBy":"admin","sakai:pool-content-created-for":"username01","sakai:pooled-content-file-name":"scottishwildcat.jpg","_bodyCreated":1318866625901,"sakai:copyright":"creativecommons","_length":105823,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"image/jpeg","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"uzALYPjXEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["username01"],"_created":1318866625891,"_bodyLastModified":1318866625901,"_lastModified":1318866645556,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/BG/y2/qf/BGy2qfXufsH7KLWNfavIc_c-1HI","sakai:activity-source":"iOOmojoOaa","sakai:activity-type":"pooled 
content","sakai:activity-appid":"default","sakai:activity-actor":"username01","sakai:activityMessage":"CREATED_FILE","who":{"hash":"username01","basic":{"access":"everybody","elements":{"lastName":{"value":"Lastname01"},"email":{"value":"asd38@caret.cam.ac.uk"},"firstName":{"value":"firstname01"}}},"rep:userId":"username01","userid":"username01","counts":{"contactsCount":1,"membershipsCount":3,"contentCount":8,"countLastUpdate":1318904551544},"sakai:excludeSearch":false}},{"sling:resourceType":"sakai/user-home","_created":1318866565670,"_id":"lwisYPjXEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_lastModified":1318866565670,"_createdBy":"admin","_path":"a:username01","sakai:activity-source":"a:username01","sakai:activity-type":"user","sakai:activity-appid":"default","sakai:activity-actor":"admin","sakai:activityMessage":"USER_CREATED","who":{"hash":"admin","basic":{"access":"everybody","elements":{"lastName":{"value":"User"},"email":{"value":"admin@sakai.invalid"},"firstName":{"value":"Admin"}}},"rep:userId":"admin","userid":"admin","counts":{},"sakai:excludeSearch":false}}],"total":198} \ No newline at end of file diff --git a/extensions/app/static/var/search/public/random-content.json b/extensions/app/static/var/search/public/random-content.json new file mode 100644 index 00000000..f5e20e00 --- /dev/null +++ b/extensions/app/static/var/search/public/random-content.json @@ -0,0 +1 @@ +{"items":10,"results":[{"_previousBlockId":"Bun6MPWeEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"Buc7EPWeEeCa7Flirf_xsA+","_path":"ibtAKSqCmg","sakai:fileextension":".jpg","_blockId":"Bun6MPWeEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"_id":"Bw388fWeEeCa7Flirf_xsA+","_bodyCreatedBy":"admin","sakai:pool-content-created-for":"jrf","sakai:pooled-content-file-name":"AAR 
Logo.jpg","_bodyCreated":1318511989076,"sakai:copyright":"creativecommons","_length":11444,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"image/jpeg","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"Bw388PWeEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["jrf"],"_created":1318511989057,"_bodyLastModified":1318511989076,"_lastModified":1318515175689,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/vH/4H/7H/vH4H7H4xXl0Gk0x4TOCEBH27RiU","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-13T14:12:55+00:00","jcr:mimeType":"image/jpeg","jcr:data":11444},{"_previousBlockId":"cNoLAPUOEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"cNj5kPUOEeCa7Flirf_xsA+","sakai:fileextension":".jpg","_path":"i6asiWyeie","_blockId":"cNoLAPUOEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone","Jack-Fitzmier-s-Biospace"],"sakai:pool-content-created-for":"jrf","_bodyCreatedBy":"admin","_id":"cQ4TUfUOEeCa7Flirf_xsA+","sakai:pooled-content-file-name":"FItzmier 
Photo.jpg","_bodyCreated":1318450319292,"sakai:copyright":"creativecommons","_length":73667,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"image/jpeg","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"cQ4TUPUOEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["jrf"],"_created":1318450319273,"_bodyLastModified":1318450319292,"_lastModified":1318515175433,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/FH/ie/jr/FHiejr-KUGHl0RwbQCjGgYGt1rU","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-13T14:12:55+00:00","jcr:mimeType":"image/jpeg","jcr:data":73667},{"_previousBlockId":"bkcdMPJzEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"bkWWkPJzEeCa7Flirf_xsA+","_path":"iBgksMNoaa","sakai:fileextension":".pdf","_blockId":"bkcdMPJzEeCa7Flirf_xsA+","sakai:description":"New York Central, Michigan Division Map 
(1943)","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"_id":"cItn4fJzEeCa7Flirf_xsA+","_bodyCreatedBy":"admin","sakai:pool-content-created-for":"arwhyte","sakai:pooled-content-file-name":"nyc-division-map-michigan-1943.pdf","_bodyCreated":1318163840649,"sakai:copyright":"nocopyright","_length":1464769,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"application/pdf","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"cItn4PJzEeCa7Flirf_xsA+","sling:resourceType":"sakai/pooled-content","sakai:showcomments":"true","sakai:pooled-content-manager":["arwhyte"],"_created":1318163840633,"_bodyLastModified":1318163840649,"_lastModified":1318342585749,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/Zt/RY/yl/ZtRYylTYtKXmJiPTRrhwd39q6H4","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-11T14:16:25+00:00","jcr:mimeType":"application/pdf","jcr:data":1464769},{"_previousBlockId":"077goPEEEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"073PMPEEEeCa7Flirf_xsA+","_path":"ijomhKc9ie","sakai:fileextension":".docx","_blockId":"077goPEEEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"_id":"0-d3QfEEEeCa7Flirf_xsA+","_bodyCreatedBy":"admin","sakai:pool-content-created-for":"wc877","sakai:pooled-content-file-name":"2_Stix_main_menu.docx","_bodyCreated":1318006385579,"sakai:copyright":"creativecommons","_length":57391,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"application/vnd.openxmlformats-officedocument.wordprocessingml.document","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"0-d3QPEEEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["wc877"],"_created":1318006385571,"_bodyLastModified":1318006385579,"_lastModified":1318006402834,"sakai:pagecount":"4",
"sakai:hasPreview":"true","_bodyLocation":"2011/9/iw/bW/-L/iwbW-LfctuVXrWmyVoC8XHYi2Nk","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-07T16:53:22+00:00","jcr:mimeType":"application/vnd.openxmlformats-officedocument.wordprocessingml.document","jcr:data":57391},{"_previousBlockId":"wL9xkPEEEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"wL5gIPEEEeCa7Flirf_xsA+","_path":"ijowQ1Aaa","sakai:fileextension":".doc","_blockId":"wL9xkPEEEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"_id":"wOKxAfEEEeCa7Flirf_xsA+","_bodyCreatedBy":"admin","sakai:pool-content-created-for":"wc877","sakai:pooled-content-file-name":"10_things_to_do_with_chicken_breasts_4.26.doc","_bodyCreated":1318006353706,"sakai:copyright":"creativecommons","_length":54272,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"application/msword","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"wOKxAPEEEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["wc877"],"_created":1318006353698,"_bodyLastModified":1318006353706,"_lastModified":1318006376288,"sakai:pagecount":"2","sakai:hasPreview":"true","_bodyLocation":"2011/9/oP/D7/Fa/oPD7FaJaecpdOwZjyasbJMMHClY","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-07T16:52:56+00:00","jcr:mimeType":"application/msword","jcr:data":54272},{"_previousBlockId":"BDl2APEFEeCa7Flirf_xsA+","_lastModifiedBy":"admin","_previousVersion":"BDg9gPEFEeCa7Flirf_xsA+","_path":"ijs3SFyaa","sakai:fileextension":".jpg","_blockId":"BDl2APEFEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"_id":"BH7jcfEFEeCa7Flirf_xsA+","_bodyCreatedBy":"admin","sakai:pool-content-created-for":"wc877","sakai:pooled-content-file-name":"2.jpg","_bodyCreated":1318006466913,"sakai:copyright":"creat
ivecommons","_length":54947,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"image/jpeg","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"BH7jcPEFEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["wc877"],"_created":1318006466904,"_bodyLastModified":1318006466913,"_lastModified":1318006483807,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/Xi/YO/3Z/XiYO3Z3XOpwga8kiGuLlzexLITo","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-07T16:54:43+00:00","jcr:mimeType":"image/jpeg","jcr:data":54947},{"_lastModifiedBy":"admin","_previousBlockId":"bk5wMPJzEeCa7Flirf_xsA+","_previousVersion":"bkoqcPJzEeCa7Flirf_xsA+","sakai:fileextension":".pdf","_path":"iBgkeB8DKs","sakai:description":"Michigan Department of Transportation, \"Michigan's Railroad System\" (2009)","_blockId":"bk5wMPJzEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"sakai:pool-content-created-for":"arwhyte","_bodyCreatedBy":"admin","_id":"cI51IfJzEeCa7Flirf_xsA+","sakai:pooled-content-file-name":"MDOT_Official_Rail_130897_7.pdf","_bodyCreated":1318163840701,"sakai:copyright":"copyrighted","_length":1098063,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"application/pdf","_bodyLastModifiedBy":"admin","_createdBy":"admin","sakai:tag-uuid":["88b780ad-c22e-4a62-971e-57769442c949"],"_versionHistoryId":"cI51IPJzEeCa7Flirf_xsA+","sakai:tags":["railroads michigan 
maps"],"sling:resourceType":"sakai/pooled-content","sakai:showcomments":"true","_created":1318163840663,"sakai:pooled-content-manager":["arwhyte"],"_bodyLastModified":1318163840701,"_lastModified":1318342593231,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/Dg/MR/hS/DgMRhScSPjJs7J5xJfn8xW_P_pU","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-11T14:16:33+00:00","jcr:mimeType":"application/pdf","jcr:data":1098063},{"_lastModifiedBy":"admin","_previousBlockId":"Qfsv8PELEeCa7Flirf_xsA+","_previousVersion":"QfiX4PELEeCa7Flirf_xsA+","sakai:fileextension":".jpg","_path":"ij5P1g7aa","sakai:description":"funky chicken","_blockId":"Qfsv8PELEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone"],"sakai:pool-content-created-for":"johnfking2","_bodyCreatedBy":"admin","_id":"Qr2VkfELEeCa7Flirf_xsA+","sakai:pooled-content-file-name":"7-imp-chicken2.jpg","_bodyCreated":1318009147504,"sakai:copyright":"creativecommons","_length":63138,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"image/jpeg","_bodyLastModifiedBy":"admin","_createdBy":"admin","sakai:tag-uuid":["3114ef47-c90e-498e-97e0-d003c2f51ce9"],"_versionHistoryId":"Qr2VkPELEeCa7Flirf_xsA+","sakai:tags":["chicken"],"sling:resourceType":"sakai/pooled-content","sakai:showcomments":"true","_created":1318009147486,"sakai:pooled-content-manager":["johnfking2"],"_bodyLastModified":1318009147504,"_lastModified":1318009155628,"sakai:pagecount":"1","sakai:hasPreview":"true","_bodyLocation":"2011/9/e2/1Z/2p/e21Z2pbrNL-HpPRWZGhPJCwQPhU","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-07T17:39:15+00:00","jcr:mimeType":"image/jpeg","jcr:data":63138},{"_lastModifiedBy":"admin","_previousBlockId":"BgXRkPEFEeCa7Flirf_xsA+","_previousVersion":"BgRyAPEFEeCa7Flirf_xsA+","sakai:fileextension":".png","_path":"ijsuxuLaa","sakai:description":"it's a nice old airmail 
stamp","_blockId":"BgXRkPEFEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone","chris"],"sakai:pool-content-created-for":"zach","_bodyCreatedBy":"admin","_id":"BiZ48fEFEeCa7Flirf_xsA+","sakai:pooled-content-file-name":"airplane-stamp.png","_bodyCreated":1318006469959,"sakai:copyright":"creativecommons","_length":521504,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"image/png","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"BiZ48PEFEeCa7Flirf_xsA+","sling:resourceType":"sakai/pooled-content","sakai:showcomments":"true","_created":1318006469920,"sakai:pooled-content-manager":["zach"],"_bodyLastModified":1318006469959,"_lastModified":1318006545009,"sakai:pagecount":"1","_bodyLocation":"2011/9/fw/dj/P2/fwdjP2391y6xnDgRy7iQ73q0w18","sakai:hasPreview":"true","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-07T16:55:45+00:00","jcr:mimeType":"image/png","jcr:data":521504},{"_lastModifiedBy":"admin","_previousBlockId":"4_bLQPEEEeCa7Flirf_xsA+","_previousVersion":"4_WSwPEEEeCa7Flirf_xsA+","sakai:fileextension":".docx","_path":"ijsqqIGaa","_blockId":"4_bLQPEEEeCa7Flirf_xsA+","sakai:allowcomments":"true","sakai:pooled-content-viewer":["anonymous","everyone","jrf"],"sakai:pool-content-created-for":"wc877","_bodyCreatedBy":"admin","_id":"5BdLkvEEEeCa7Flirf_xsA+","sakai:pooled-content-file-name":"2010-April-front.docx","_bodyCreated":1318006412789,"sakai:copyright":"creativecommons","_length":66863,"sakai:needsprocessing":"false","sakai:permissions":"public","_mimeType":"application/vnd.openxmlformats-officedocument.wordprocessingml.document","_bodyLastModifiedBy":"admin","_createdBy":"admin","_versionHistoryId":"5BdLkfEEEeCa7Flirf_xsA+","sakai:showcomments":"true","sling:resourceType":"sakai/pooled-content","sakai:pooled-content-manager":["wc877"],"_created":1318006412780,"_bodyLastModified":1318006412789,"_lastModified":1318259052608,"saka
i:pagecount":"2","_bodyLocation":"2011/9/b9/zQ/JC/b9zQJCYC87bgHSrWZy1iBs5xwik","sakai:hasPreview":"true","permissions":{"set_property":false,"read":true,"remove":false},"jcr:lastModified":"2011-10-10T15:04:12+00:00","jcr:mimeType":"application/vnd.openxmlformats-officedocument.wordprocessingml.document","jcr:data":66863}],"total":10} \ No newline at end of file diff --git a/extensions/app/static/var/widgets.json b/extensions/app/static/var/widgets.json new file mode 100644 index 00000000..af378f3f --- /dev/null +++ b/extensions/app/static/var/widgets.json @@ -0,0 +1 @@ +define({"discussion":{"enabled":true,"groupdashboard":true,"img":"/devwidgets/discussion/images/discussion.png","type":"sakai","url":"/devwidgets/discussion/discussion.html","grouppages":true,"id":"discussion","showinsakaigoodies":true,"description":"Discussion widget","hasSettings":true,"name":"Discussion","userpages":true,"i18n":{"default":"/devwidgets/discussion/bundles/default.properties","en_US":"/devwidgets/discussion/bundles/en_US.properties","zh_CN":"/devwidgets/discussion/bundles/zh_CN.properties"},"userdashboard":true,"sakaidocs":true},"savecontent":{"id":"savecontent","enabled":true,"hasSettings":false,"description":"Saves content into the users library","name":"Savecontent","i18n":{"default":"/devwidgets/savecontent/bundles/default.properties"},"type":"sakai","url":"/devwidgets/savecontent/savecontent.html"},"remotecontent":{"enabled":true,"groupdashboard":true,"img":"/devwidgets/remotecontent/images/remotecontent.png","defaultConfiguration":{"remotecontent":{"border_color":"cccccc","border_size":0,"height":"800","url":"http://sakaiproject.org","width":"100","width_unit":"%"}},"type":"sakai","url":"/devwidgets/remotecontent/remotecontent.html","grouppages":true,"id":"remotecontent","showinsakaigoodies":true,"description":"Remote Content","hasSettings":true,"name":"Remote 
Content","userpages":true,"i18n":{"default":"/devwidgets/remotecontent/bundles/default.properties","en_US":"/devwidgets/remotecontent/bundles/en_US.properties"},"userdashboard":true,"sakaidocs":true},"mylibrary":{"enabled":true,"groupdashboard":false,"type":"sakai","url":"/devwidgets/mylibrary/mylibrary.html","grouppages":true,"id":"mylibrary","showinsakaigoodies":true,"description":"View and manage a user's library items","hasSettings":false,"name":"My Library","userpages":false,"i18n":{"default":"/devwidgets/mylibrary/bundles/default.properties"},"personalportal":false,"userdashboard":false},"participants":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/participants/participants.html","grouppages":true,"id":"participants","showinsakaigoodies":true,"description":"List Participants of a Group","hasSettings":false,"name":"Participants","userpages":false,"i18n":{"default":"/devwidgets/participants/bundles/default.properties"},"personalportal":false,"userdashboard":false},"worldsettings":{"enabled":true,"ca":false,"groupdashboard":false,"type":"core","url":"/devwidgets/worldsettings/worldsettings.html","grouppages":false,"id":"worldsettings","showinsakaigoodies":false,"description":"Edit World Settings","hasSettings":false,"name":"World Settings","userpages":false,"i18n":{"default":"/devwidgets/worldsettings/bundles/default.properties"},"userdashboard":false,"personalportal":false,"sakaidocs":false},"help":{"id":"help","enabled":true,"hasSettings":false,"description":"Help","name":"Help","i18n":{"default":"/devwidgets/help/bundles/default.properties"},"type":"sakai","url":"/devwidgets/help/help.html"},"listpeople":{"id":"listpeople","enabled":true,"hasSettings":false,"description":"General people lister widget","name":"People 
Lister","i18n":{"default":"/devwidgets/listpeople/bundles/default.properties"},"type":"core","subNameInfoContent":"mimeTypeDescripton","url":"/devwidgets/listpeople/listpeople.html"},"searchsakai2":{"id":"searchsakai2","showinsakaigoodies":false,"enabled":false,"hasSettings":false,"description":"Search Sakai 2","name":"Search Sakai 2","i18n":{"default":"/devwidgets/searchsakai2/bundles/default.properties"},"personalportal":false,"sakaidocs":false,"type":"core","url":"/devwidgets/searchsakai2/searchsakai2.html"},"recentmemberships":{"id":"recentmemberships","enabled":true,"hasSettings":false,"description":"My recent memberships","name":"My recent memberships","deletable":true,"i18n":{"default":"/devwidgets/recentmemberships/bundles/default.properties"},"personalportal":true,"type":"sakai","url":"/devwidgets/recentmemberships/recentmemberships.html"},"userpermissions":{"enabled":true,"groupdashboard":false,"type":"core","url":"/devwidgets/userpermissions/userpermissions.html","grouppages":false,"id":"userpermissions","showinsakaigoodies":false,"description":"User Permissions","hasSettings":false,"name":"User Permissions","userpages":false,"i18n":{"default":"/devwidgets/userpermissions/bundles/default.properties"},"personalportal":false,"userdashboard":false},"video":{"enabled":true,"groupdashboard":true,"showinmedia":true,"img":"/devwidgets/video/images/video.png","type":"sakai","url":"/devwidgets/video/video.html","grouppages":true,"id":"video","indexFields":["source","title","url"],"description":"Video","hasSettings":true,"name":"Video","i18n":{"default":"/devwidgets/video/bundles/default.properties"},"userpages":true,"userdashboard":true},"contentpreview":{"id":"contentpreview","enabled":true,"hasSettings":false,"description":"Content Preview","name":"Content 
Preview","i18n":{"default":"/devwidgets/contentpreview/bundles/default.properties"},"type":"core","url":"/devwidgets/contentpreview/contentpreview.html"},"mycontent":{"id":"mycontent","enabled":true,"hasSettings":false,"description":"My content","name":"My content","deletable":true,"i18n":{"default":"/devwidgets/mycontent/bundles/default.properties"},"personalportal":true,"type":"sakai","url":"/devwidgets/mycontent/mycontent.html"},"allcategories":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/allcategories/allcategories.html","grouppages":false,"id":"allcategories","showinsakaigoodies":false,"description":"All Categories Widget","hasSettings":true,"name":"All Categories Widget","userpages":false,"i18n":{"default":"/devwidgets/allcategories/bundles/default.properties"},"personalportal":false,"userdashboard":false},"activegroups":{"enabled":true,"groupdashboard":false,"type":"sakai","url":"/devwidgets/activegroups/activegroups.html","grouppages":false,"id":"activegroups","description":"Most active groups","hasSettings":false,"name":"Most active groups","userpages":false,"i18n":{"default":"/devwidgets/activegroups/bundles/default.properties"},"personalportal":true,"userdashboard":false},"profilesection":{"id":"profilesection","enabled":true,"description":"Show a profile section","name":"Profile Section","i18n":{"default":"/devwidgets/profilesection/bundles/default.properties"},"type":"core","url":"/devwidgets/profilesection/profilesection.html"},"popularcontent":{"enabled":true,"groupdashboard":false,"type":"sakai","url":"/devwidgets/popularcontent/popularcontent.html","grouppages":false,"id":"popularcontent","description":"Most active content","hasSettings":false,"name":"Most active content","userpages":false,"i18n":{"default":"/devwidgets/popularcontent/bundles/default.properties"},"personalportal":true,"userdashboard":false},"relatedcontent":{"id":"relatedcontent","enabled":true,"hasSettings":false,"description":"Related 
Content","name":"Related Content","i18n":{"default":"/devwidgets/relatedcontent/bundles/default.properties"},"type":"core","url":"/devwidgets/relatedcontent/relatedcontent.html"},"dashboard":{"id":"dashboard","enabled":true,"showinsakaigoodies":false,"ca":true,"hasSettings":true,"description":"Dashboard","name":"Dashboard","img":"/devwidgets/comments/images/comments.png","siteportal":false,"i18n":{"default":"/devwidgets/dashboard/bundles/default.properties"},"type":"core","url":"/devwidgets/dashboard/dashboard.html"},"userprofile":{"id":"userprofile","enabled":true,"showinsakaigoodies":false,"ca":false,"hasSettings":false,"description":"User Profile","name":"User Profile","siteportal":false,"type":"core","url":"/devwidgets/userprofile/userprofile.html"},"pickeradvanced":{"id":"pickeradvanced","enabled":true,"hasSettings":false,"description":"Advanced Search Picker Widget","name":"Advanced Search Picker","i18n":{"default":"/devwidgets/pickeradvanced/bundles/default.properties"},"type":"core","url":"/devwidgets/pickeradvanced/pickeradvanced.html"},"sakai2favourites":{"id":"sakai2favourites","enabled":true,"i18n":{"default":"/devwidgets/sakai2favourites/bundles/default.properties"},"type":"core","url":"/devwidgets/sakai2favourites/sakai2favourites.html"},"faceted":{"id":"faceted","enabled":true,"name":"Faceted","i18n":{"default":"/devwidgets/faceted/bundles/default.properties"},"type":"core","url":"/devwidgets/faceted/faceted.html"},"sharecontent":{"id":"sharecontent","enabled":true,"hasSettings":false,"description":"Share content widget","name":"Share 
Content","i18n":{"default":"/devwidgets/sharecontent/bundles/default.properties"},"type":"core","url":"/devwidgets/sharecontent/sharecontent.html"},"inbox":{"id":"inbox","enabled":true,"hashParams":["message","newmessage","reply","iq"],"description":"Inbox","name":"inbox","i18n":{"default":"/devwidgets/inbox/bundles/default.properties"},"personalportal":false,"type":"sakai","url":"/devwidgets/inbox/inbox.html"},"listgeneral":{"id":"listgeneral","enabled":true,"hasSettings":false,"description":"General space, site and content lister widget","name":"General Lister","i18n":{"default":"/devwidgets/listgeneral/bundles/default.properties"},"type":"core","url":"/devwidgets/listgeneral/listgeneral.html"},"filerevisions":{"id":"filerevisions","enabled":true,"description":"View file revisions","i18n":{"default":"/devwidgets/filerevisions/bundles/default.properties","en_GB":"/devwidgets/filerevisions/bundles/en_GB.properties","nl_NL":"/devwidgets/filerevisions/bundles/nl_NL.properties"},"type":"core","url":"/devwidgets/filerevisions/filerevisions.html"},"mysakai2":{"id":"mysakai2","enabled":true,"hasSettings":false,"description":"My Sakai 2 favourites","name":"My Sakai 2 favourites","i18n":{"default":"/devwidgets/mysakai2/bundles/default.properties"},"personalportal":true,"type":"sakai","url":"/devwidgets/mysakai2/mysakai2.html"},"fileupload":{"id":"fileupload","enabled":true,"description":"Add Files","name":"fileupload","i18n":{"default":"/devwidgets/fileupload/bundles/default.properties","en_US":"/devwidgets/fileupload/bundles/en_US.properties","nl_NL":"/devwidgets/fileupload/bundles/nl_NL.properties"},"type":"core","url":"/devwidgets/fileupload/fileupload.html"},"carousel":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/carousel/carousel.html","grouppages":false,"id":"carousel","showinsakaigoodies":false,"description":"Carousel 
widget","hasSettings":true,"name":"Carousel","userpages":false,"i18n":{"default":"/devwidgets/carousel/bundles/default.properties"},"personalportal":false,"userdashboard":false},"googlemaps":{"enabled":true,"groupdashboard":true,"img":"/devwidgets/googlemaps/images/googlemaps.png","type":"sakai","url":"/devwidgets/googlemaps/googlemaps.html","grouppages":true,"id":"googlemaps","indexFields":["mapinput","maphtml"],"defaultLng":0.1312368,"showinsakaigoodies":true,"hasSettings":true,"description":"Google maps","name":"Google maps","userpages":true,"i18n":{"default":"/devwidgets/googlemaps/bundles/default.properties","en_US":"/devwidgets/googlemaps/bundles/en_US.properties","zh_CN":"/devwidgets/googlemaps/bundles/zh_CN.properties"},"userdashboard":true,"sakaidocs":true,"defaultZoom":8,"defaultLat":52.2025441},"helloworld":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/helloworld/helloworld.html","grouppages":false,"id":"helloworld","showinsakaigoodies":false,"description":"Sakai OAE Widget SDK Demonstration Widget","hasSettings":true,"name":"Hello World","userpages":false,"i18n":{"default":"/devwidgets/helloworld/bundles/default.properties"},"personalportal":false,"userdashboard":false},"tags":{"enabled":true,"groupdashboard":false,"type":"sakai","url":"/devwidgets/tags/tags.html","grouppages":false,"id":"tags","description":"Most popular tags","hasSettings":false,"name":"Most popular tags","userpages":false,"i18n":{"default":"/devwidgets/tags/bundles/default.properties"},"personalportal":true,"userdashboard":false},"newsharecontent":{"id":"newsharecontent","enabled":true,"hasSettings":false,"description":"Share content widget","name":"Share 
Content","i18n":{"default":"/devwidgets/newsharecontent/bundles/default.properties"},"defaultConfiguration":{"newsharecontent":{"addThisAccountId":"xa-4db72a071927628b"}},"type":"core","url":"/devwidgets/newsharecontent/newsharecontent.html"},"newcreategroup":{"id":"newcreategroup","enabled":true,"description":"Create Group","name":"Create Group","i18n":{"default":"/devwidgets/newcreategroup/bundles/default.properties","en_US":"/devwidgets/newcreategroup/bundles/en_US.properties"},"type":"core","url":"/devwidgets/newcreategroup/newcreategroup.html"},"sendmessage":{"id":"sendmessage","enabled":true,"description":"Send a message","name":"Send a message","i18n":{"default":"/devwidgets/sendmessage/bundles/default.properties","zh_CN":"/devwidgets/sendmessage/bundles/zh_CN.properties"},"type":"core","url":"/devwidgets/sendmessage/sendmessage.html"},"systemtour":{"enabled":true,"groupdashboard":false,"siteportal":false,"type":"sakai","url":"/devwidgets/systemtour/systemtour.html","grouppages":false,"id":"systemtour","showinsakaigoodies":false,"description":"Tracks System Tour Progress","hasSettings":false,"name":"System Tour","userpages":false,"i18n":{"default":"/devwidgets/systemtour/bundles/default.properties"},"userdashboard":false},"listpeopleinnode":{"id":"listpeopleinnode","enabled":true,"hasSettings":true,"description":"Browsing people in this node of the directory","name":"listpeopleinnode","i18n":{"default":"/devwidgets/listpeopleinnode/bundles/default.properties"},"type":"core","url":"/devwidgets/listpeopleinnode/listpeopleinnode.html"},"newaddcontent":{"id":"newaddcontent","enabled":true,"showinsakaigoodies":true,"ca":true,"description":"Add Resource to a Site","name":"Add Resource","i18n":{"default":"/devwidgets/newaddcontent/bundles/default.properties"},"type":"core","url":"/devwidgets/newaddcontent/newaddcontent.html"},"footer":{"id":"footer","enabled":true,"description":"Dynamic Footer with Debug Info","name":"Dynamic 
Footer","i18n":{"default":"/devwidgets/footer/bundles/default.properties","en_US":"/devwidgets/footer/bundles/en_US.properties","zh_CN":"/devwidgets/footer/bundles/zh_CN.properties"},"type":"core","url":"/devwidgets/footer/footer.html"},"embedcontent":{"enabled":true,"img":"/devwidgets/embedcontent/images/content.png","type":"core","url":"/devwidgets/embedcontent/embedcontent.html","grouppages":true,"id":"embedcontent","indexFields":["title","description"],"description":"Embed Content on a Page","hasSettings":true,"name":"Files and documents","userpages":true,"i18n":{"default":"/devwidgets/embedcontent/bundles/default.properties"},"sakaidocs":true,"settingsWidth":785},"assignlocation":{"id":"assignlocation","enabled":true,"hasSettings":false,"description":"Assign a directory location to an entity","name":"Assign Location","i18n":{"default":"/devwidgets/assignlocation/bundles/default.properties"},"type":"core","url":"/devwidgets/assignlocation/assignlocation.html"},"addarea":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/addarea/addarea.html","grouppages":false,"id":"addarea","showinsakaigoodies":false,"description":"Add Area Widget","hasSettings":true,"name":"Add Area Widget","userpages":false,"i18n":{"default":"/devwidgets/addarea/bundles/default.properties"},"personalportal":false,"userdashboard":false},"searchcontent":{"enabled":false,"groupdashboard":false,"type":"core","url":"/devwidgets/searchcontent/searchcontent.html","grouppages":false,"id":"searchcontent","showinsakaigoodies":false,"description":"Search Content","hasSettings":false,"name":"Search Content","userpages":false,"i18n":{"default":"/devwidgets/searchcontent/bundles/default.properties"},"personalportal":false,"userdashboard":false},"captcha":{"id":"captcha","enabled":true,"description":"Display a captcha widget","name":"Captcha 
Widget","i18n":{"default":"/devwidgets/captcha/bundles/default.properties"},"type":"core","url":"/devwidgets/captcha/captcha.html"},"comments":{"enabled":true,"groupdashboard":true,"img":"/devwidgets/comments/images/comments.png","siteportal":true,"type":"sakai","url":"/devwidgets/comments/comments.html","grouppages":true,"id":"comments","showinsakaigoodies":true,"description":"Comments","hasSettings":true,"name":"Comments","userpages":true,"i18n":{"default":"/devwidgets/comments/bundles/default.properties"},"userdashboard":true,"sakaidocs":true},"deletecontent":{"id":"deletecontent","enabled":true,"description":"Delete content widget","name":"Delete Content","i18n":{"default":"/devwidgets/deletecontent/bundles/default.properties"},"type":"core","url":"/devwidgets/deletecontent/deletecontent.html"},"featuredpeople":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/featuredpeople/featuredpeople.html","grouppages":false,"id":"featuredpeople","showinsakaigoodies":false,"description":"Featured People Widget","hasSettings":true,"name":"Featured People Widget","userpages":false,"i18n":{"default":"/devwidgets/featuredpeople/bundles/default.properties"},"personalportal":false,"userdashboard":false},"featuredcontent":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/featuredcontent/featuredcontent.html","grouppages":false,"id":"featuredcontent","showinsakaigoodies":false,"description":"Featured Content Widget","hasSettings":true,"name":"Featured Content Widget","userpages":false,"i18n":{"default":"/devwidgets/featuredcontent/bundles/default.properties"},"personalportal":false,"userdashboard":false},"contentpermissions":{"id":"contentpermissions","enabled":true,"hasSettings":false,"description":"Content permissions widget","name":"Content 
Permissions","i18n":{"default":"/devwidgets/contentpermissions/bundles/default.properties"},"type":"core","url":"/devwidgets/contentpermissions/contentpermissions.html"},"joinrequestbuttons":{"id":"joinrequestbuttons","enabled":true,"hasSettings":false,"description":"Display join/leave/pending buttons to match the context","name":"Join Request Buttons","i18n":{"default":"/devwidgets/joinrequestbuttons/bundles/default.properties"},"personalportal":false,"type":"core","url":"/devwidgets/joinrequestbuttons/joinrequestbuttons.html"},"_template":{"id":"WIDGET_ID","showinsakaigoodies":false,"enabled":false,"hasSettings":false,"description":"WIDGET DESCRIPTION","name":"WIDGET NAME","img":"/devwidgets/WIDGET_ID/images/WIDGET_ID.png","i18n":{"default":"/devwidgets/WIDGET_ID/bundles/default.properties"},"personalportal":false,"sakaidocs":false,"type":"contrib","url":"/devwidgets/WIDGET_ID/WIDGET_ID.html"},"jisccontent":{"enabled":true,"groupdashboard":false,"img":"/devwidgets/jisccontent/images/jisccontent.png","type":"contrib","url":"/devwidgets/jisccontent/jisccontent.html","grouppages":false,"id":"jisccontent","showinsakaigoodies":true,"description":"JISC Content","hasSettings":false,"name":"JISC Content","userpages":false,"i18n":{"default":"/devwidgets/jisccontent/bundles/default.properties"},"userdashboard":false,"personalportal":true,"sakaidocs":true,"settingsWidth":256},"entity":{"id":"entity","enabled":true,"i18n":{"default":"/devwidgets/entity/bundles/default.properties"},"type":"core","url":"/devwidgets/entity/entity.html"},"areapermissions":{"enabled":true,"groupdashboard":false,"type":"core","url":"/devwidgets/areapermissions/areapermissions.html","grouppages":false,"id":"areapermissions","showinsakaigoodies":false,"description":"Area Permissions","hasSettings":false,"name":"Area 
Permissions","userpages":false,"i18n":{"default":"/devwidgets/areapermissions/bundles/default.properties"},"personalportal":false,"userdashboard":false},"contentmetadata":{"id":"contentmetadata","enabled":true,"i18n":{"default":"/devwidgets/contentmetadata/bundles/default.properties"},"type":"core","url":"/devwidgets/contentmetadata/contentmetadata.html"},"contacts":{"id":"contacts","enabled":true,"description":"Contacts","name":"contacts","i18n":{"default":"/devwidgets/contacts/bundles/default.properties"},"personalportal":false,"type":"sakai","url":"/devwidgets/contacts/contacts.html"},"institutionalskinning":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/institutionalskinning/institutionalskinning.html","grouppages":false,"id":"institutionalskinning","showinsakaigoodies":false,"description":"Institutional Skinning Widget","hasSettings":true,"name":"Institutional Skinning","userpages":false,"i18n":{"default":"/devwidgets/institutionalskinning/bundles/default.properties"},"personalportal":false,"userdashboard":false},"poll":{"enabled":true,"groupdashboard":false,"img":"/devwidgets/poll/images/poll.png","type":"sakai","url":"/devwidgets/poll/poll.html","grouppages":false,"id":"poll","showinsakaigoodies":true,"description":"Poll widget","name":"Poll","i18n":{"default":"/devwidgets/poll/bundles/default.properties"},"userpages":false,"userdashboard":false},"joingroup":{"id":"joingroup","enabled":true,"description":"Join Group Overlay","name":"Join Group Overlay","i18n":{"default":"/devwidgets/joingroup/bundles/default.properties"},"personalportal":false,"type":"sakai","url":"/devwidgets/joingroup/joingroup.html"},"mycontacts":{"id":"mycontacts","enabled":true,"description":"My contacts","name":"My 
contacts","i18n":{"default":"/devwidgets/mycontacts/bundles/default.properties","en_US":"/devwidgets/mycontacts/bundles/en_US.properties","zh_CN":"/devwidgets/mycontacts/bundles/zh_CN.properties"},"personalportal":true,"type":"sakai","multipleinstance":false,"url":"/devwidgets/mycontacts/mycontacts.html"},"rss":{"enabled":true,"groupdashboard":true,"img":"/devwidgets/rss/images/rss.png","defaultConfiguration":{"rss":{"displayHeadlines":false,"displaySource":false,"feeds":[],"numEntries":25,"title":"The Sakai Project","urlFeeds":["http://sakaiproject.org/rss.xml"]}},"type":"sakai","url":"/devwidgets/rss/rss.html","grouppages":true,"id":"rss","indexFields":["title"],"showinsakaigoodies":true,"description":"RSS Feed Reader","hasSettings":true,"name":"RSS Feed Reader","i18n":{"default":"/devwidgets/rss/bundles/default.properties","en_US":"/devwidgets/rss/bundles/en_US.properties","nl_NL":"/devwidgets/rss/bundles/nl_NL.properties"},"userpages":true,"userdashboard":true,"sakaidocs":true},"addpeople":{"id":"addpeople","enabled":true,"description":"Add people","name":"Add people","i18n":{"default":"/devwidgets/addpeople/bundles/default.properties"},"type":"core","url":"/devwidgets/addpeople/addpeople.html"},"contentcomments":{"enabled":true,"groupdashboard":false,"img":"/devwidgets/contentcomments/images/comments.png","siteportal":false,"type":"core","url":"/devwidgets/contentcomments/contentcomments.html","grouppages":false,"id":"contentcomments","showinsakaigoodies":false,"description":"Content Comments","hasSettings":true,"name":"Content 
Comments","userpages":false,"i18n":{"default":"/devwidgets/contentcomments/bundles/default.properties"},"userdashboard":false},"login":{"id":"login","enabled":true,"hasSettings":true,"description":"Login","name":"Login","type":"core","url":"/devwidgets/login/login.html"},"welcome":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/welcome/welcome.html","grouppages":false,"id":"welcome","showinsakaigoodies":false,"description":"Welcome Widget","hasSettings":true,"name":"Welcome Widget","userpages":false,"i18n":{"default":"/devwidgets/welcome/bundles/default.properties"},"personalportal":false,"userdashboard":false},"sakaidocs":{"enabled":false,"groupdashboard":false,"type":"contrib","url":"/devwidgets/sakaidocs/sakaidocs.html","grouppages":false,"id":"sakaidocs","showinsakaigoodies":false,"description":"Sakai Docs","hasSettings":false,"name":"Sakai Docs","userpages":false,"i18n":{"default":"/devwidgets/sakaidocs/bundles/default.properties"},"personalportal":false,"userdashboard":false},"mymemberships":{"id":"mymemberships","enabled":true,"description":"My Memberships","name":"My Memberships","i18n":{"default":"/devwidgets/mymemberships/bundles/default.properties"},"personalportal":false,"type":"sakai","url":"/devwidgets/mymemberships/mymemberships.html"},"sakai2tools":{"enabled":true,"groupdashboard":true,"ca":true,"img":"/devwidgets/sakai2tools/images/sakai2tools.png","type":"sakai","url":"/devwidgets/sakai2tools/sakai2tools.html","grouppages":true,"id":"sakai2tools","showinsakaigoodies":true,"hasSettings":false,"description":"Sakai 2 Tools","name":"Sakai 2 Tools","i18n":{"default":"/devwidgets/sakai2tools/bundles/default.properties"},"userpages":true,"userdashboard":true,"sakaidocs":true},"searchall":{"enabled":false,"groupdashboard":false,"type":"core","url":"/devwidgets/searchall/searchall.html","grouppages":false,"id":"searchallold","showinsakaigoodies":false,"description":"Search All","hasSettings":false,"name":"Search 
All","userpages":false,"i18n":{"default":"/devwidgets/searchall/bundles/default.properties"},"personalportal":false,"userdashboard":false},"text":{"enabled":true,"groupdashboard":true,"type":"sakai","url":"/devwidgets/text/text.html","grouppages":false,"id":"text","indexFields":["data/text"],"description":"Text","hasSettings":true,"name":"","userpages":false,"i18n":{"default":"/devwidgets/text/bundles/default.properties"},"userdashboard":true},"joinrequests":{"id":"joinrequests","enabled":true,"hasSettings":false,"description":"Manage join requests for a specific group.","name":"Join Requests","i18n":{"default":"/devwidgets/joinrequests/bundles/default.properties"},"personalportal":false,"type":"core","url":"/devwidgets/joinrequests/joinrequests.html"},"accountpreferences":{"id":"accountpreferences","enabled":true,"description":"Account Preferences","name":"Account Preferences","i18n":{"default":"/devwidgets/accountpreferences/bundles/default.properties"},"type":"core","url":"/devwidgets/accountpreferences/accountpreferences.html"},"contentpicker":{"id":"contentpicker","enabled":true,"description":"Pick Content","name":"Content Picker","i18n":{"default":"/devwidgets/contentpicker/bundles/default.properties"},"type":"core","url":"/devwidgets/contentpicker/contentpicker.html"},"recentmessages":{"id":"recentmessages","enabled":true,"description":"My recent messages","name":"My recent messages","i18n":{"default":"/devwidgets/recentmessages/bundles/default.properties","en_US":"/devwidgets/recentmessages/bundles/en_US.properties"},"personalportal":true,"type":"sakai","url":"/devwidgets/recentmessages/recentmessages.html"},"navigation":{"id":"navigation","enabled":true,"showinsakaigoodies":true,"showinsidebar":true,"hasSettings":true,"description":"Navigate all the pages contained within this group or 
site","name":"Pages","img":"/devwidgets/navigation/images/icon.png","i18n":{"default":"/devwidgets/navigation/bundles/default.properties"},"type":"core","url":"/devwidgets/navigation/navigation.html"},"recentcontactsnew":{"id":"recentcontactsnew","enabled":true,"hasSettings":false,"description":"My recent contacts","name":"My recent contacts","deletable":true,"i18n":{"default":"/devwidgets/recentcontactsnew/bundles/default.properties"},"personalportal":true,"type":"sakai","url":"/devwidgets/recentcontactsnew/recentcontactsnew.html"},"basiclti":{"enabled":true,"groupdashboard":true,"ca":true,"img":"/devwidgets/basiclti/images/basiclti.png","type":"sakai","url":"/devwidgets/basiclti/basiclti.html","grouppages":true,"id":"basiclti","showinsakaigoodies":true,"hasSettings":false,"description":"Basic LTI Widget","name":"Basic LTI","i18n":{"default":"/devwidgets/basiclti/bundles/default.properties"},"userpages":true,"userdashboard":true,"sakaidocs":true},"lhnavigation":{"enabled":true,"hashParams":["newPageMode","l"],"groupdashboard":false,"type":"core","url":"/devwidgets/lhnavigation/lhnavigation.html","grouppages":false,"id":"lhnavigation","showinsakaigoodies":false,"description":"Left Hand Navigation Widget","hasSettings":true,"name":"Left Hand Navigation","userpages":false,"i18n":{"default":"/devwidgets/lhnavigation/bundles/default.properties"},"personalportal":false,"userdashboard":false},"addtocontacts":{"id":"addtocontacts","enabled":true,"description":"Add a contact","name":"Add a contact","i18n":{"default":"/devwidgets/addtocontacts/bundles/default.properties","zh_CN":"/devwidgets/addtocontacts/bundles/zh_CN.properties"},"type":"core","url":"/devwidgets/addtocontacts/addtocontacts.html"},"searchpeople":{"enabled":false,"groupdashboard":false,"type":"core","url":"/devwidgets/searchpeople/searchpeople.html","grouppages":false,"id":"searchpeople","showinsakaigoodies":false,"description":"Search People","hasSettings":false,"name":"Search 
People","userpages":false,"i18n":{"default":"/devwidgets/searchpeople/bundles/default.properties"},"personalportal":false,"userdashboard":false},"pickeruser":{"id":"pickeruser","enabled":true,"hasSettings":false,"description":"General people picker widget","name":"People Picker","i18n":{"default":"/devwidgets/pickeruser/bundles/default.properties"},"type":"core","url":"/devwidgets/pickeruser/pickeruser.html"},"personinfo":{"id":"personinfo","enabled":true,"hasSettings":false,"description":"Overlay that displays person info","name":"Personinfo","i18n":{"default":"/devwidgets/personinfo/bundles/default.properties"},"type":"sakai","url":"/devwidgets/personinfo/personinfo.html"},"siterecentactivity":{"id":"siterecentactivity","enabled":true,"showinsidebar":true,"hasSettings":false,"description":"Site Recent Activity","name":"Recent Activity","img":"/devwidgets/siterecentactivity/images/icon.png","type":"core","url":"/devwidgets/siterecentactivity/siterecentactivity.html","grouppages":true},"ggadget":{"enabled":true,"groupdashboard":true,"img":"/devwidgets/ggadget/images/ggadget.png","defaultConfiguration":{"ggadget":{"border_color":"cccccc","border_size":0,"height":350,"url":"http://www.gmodules.com/ig/ifr?url=http://static.die.net/moon/gadget.xml&up_size=medium&up_info=1&up_date=0&synd=open&w=320&h=170&title=Moon+Phase&border=%23ffffff%7C3px%2C1px+solid+%23999999&output=html","width":600,"width_unit":"px"}},"type":"sakai","url":"/devwidgets/ggadget/ggadget.html","grouppages":true,"id":"ggadget","showinsakaigoodies":true,"description":"Embed Google Gadgets in your page or dashboard","hasSettings":true,"name":"Google Gadget","userpages":true,"i18n":{"default":"/devwidgets/ggadget/bundles/default.properties"},"userdashboard":true,"sakaidocs":true},"createpage":{"id":"createpage","enabled":true,"description":"Create Page Widget","name":"Create 
Page","i18n":{"default":"/devwidgets/createpage/bundles/default.properties"},"type":"core","url":"/devwidgets/createpage/createpage.html"},"mygroups":{"id":"mygroups","enabled":true,"description":"My memberships","name":"My memberships","deletable":true,"i18n":{"default":"/devwidgets/mygroups/bundles/default.properties","en_US":"/devwidgets/mygroups/bundles/en_US.properties","zh_CN":"/devwidgets/mygroups/bundles/zh_CN.properties"},"personalportal":true,"type":"sakai","url":"/devwidgets/mygroups/mygroups.html"},"uploadnewversion":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/uploadnewversion/uploadnewversion.html","grouppages":false,"id":"uploadnewversion","showinsakaigoodies":false,"description":"Upload New Version Widget","hasSettings":true,"name":"Upload New Version Widget","userpages":false,"i18n":{"default":"/devwidgets/uploadnewversion/bundles/default.properties"},"personalportal":false,"userdashboard":false},"changepic":{"id":"changepic","enabled":true,"name":"changepic","i18n":{"default":"/devwidgets/changepic/bundles/default.properties","en_US":"/devwidgets/changepic/bundles/en_US.properties","zh_CN":"/devwidgets/changepic/bundles/zh_CN.properties"},"type":"core","url":"/devwidgets/changepic/changepic.html"},"recentactivity":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/recentactivity/recentactivity.html","grouppages":false,"id":"recentactivity","showinsakaigoodies":false,"description":"Recent Activity Widget","hasSettings":true,"name":"Recent Activity Widget","userpages":false,"i18n":{"default":"/devwidgets/recentactivity/bundles/default.properties"},"personalportal":false,"userdashboard":false},"selecttemplate":{"id":"selecttemplate","enabled":true,"description":"Select Template","name":"Select 
Template","i18n":{"default":"/devwidgets/selecttemplate/bundles/default.properties","en_US":"/devwidgets/selecttemplate/bundles/en_US.properties"},"type":"core","url":"/devwidgets/selecttemplate/selecttemplate.html"},"featuredworlds":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/featuredworlds/featuredworlds.html","grouppages":false,"id":"featuredworlds","showinsakaigoodies":false,"description":"Featured Worlds Widget","hasSettings":true,"name":"Featured Worlds Widget","userpages":false,"i18n":{"default":"/devwidgets/featuredworlds/bundles/default.properties"},"personalportal":false,"userdashboard":false},"listpeoplewrappergroup":{"id":"listpeoplewrappergroup","enabled":true,"showinsakaigoodies":true,"groupdashboard":true,"hasSettings":true,"description":"Group lister","name":"Member/content list","img":"/devwidgets/listpeoplewrappergroup/images/icon.png","type":"core","url":"/devwidgets/listpeoplewrappergroup/listpeoplewrappergroup.html","grouppages":true},"uploadcontent":{"id":"uploadcontent","enabled":true,"description":"Upload content to Sakai3","name":"Upload Content","i18n":{"default":"/devwidgets/uploadcontent/bundles/default.properties"},"type":"core","url":"/devwidgets/uploadcontent/uploadcontent.html"},"searchgroups":{"enabled":false,"groupdashboard":false,"type":"core","url":"/devwidgets/searchgroups/searchgroups.html","grouppages":false,"id":"searchgroups","showinsakaigoodies":false,"description":"Search Groups","hasSettings":false,"name":"Search Groups","userpages":false,"i18n":{"default":"/devwidgets/searchgroups/bundles/default.properties"},"personalportal":false,"userdashboard":false},"displayprofilesection":{"enabled":true,"groupdashboard":false,"img":"/devwidgets/displayprofilesection/images/displayprofilesection.png","type":"sakai","url":"/devwidgets/displayprofilesection/displayprofilesection.html","grouppages":false,"id":"displayprofilesection","showinsakaigoodies":true,"hasSettings":true,"description":"Profile 
Section","name":"Profile Section","userpages":true,"i18n":{"default":"/devwidgets/displayprofilesection/bundles/default.properties"},"userdashboard":true},"documentviewer":{"id":"documentviewer","enabled":true,"hasSettings":false,"description":"Document Viewer","name":"Document Viewer","i18n":{"default":"/devwidgets/documentviewer/bundles/default.properties"},"type":"core","url":"/devwidgets/documentviewer/documentviewer.html"},"recentchangedcontent":{"id":"recentchangedcontent","enabled":true,"hasSettings":false,"description":"My recent content","name":"My recent content","deletable":true,"i18n":{"default":"/devwidgets/recentchangedcontent/bundles/default.properties"},"personalportal":true,"type":"sakai","url":"/devwidgets/recentchangedcontent/recentchangedcontent.html"},"versions":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/versions/versions.html","grouppages":false,"id":"versions","showinsakaigoodies":false,"description":"Version Widget","hasSettings":true,"name":"Version Widget","userpages":false,"i18n":{"default":"/devwidgets/versions/bundles/default.properties"},"personalportal":false,"userdashboard":false},"creategroup":{"id":"creategroup","enabled":true,"description":"Create Group","name":"Create Group","i18n":{"default":"/devwidgets/creategroup/bundles/default.properties","en_US":"/devwidgets/creategroup/bundles/en_US.properties"},"type":"core","url":"/devwidgets/creategroup/creategroup.html"},"categories":{"enabled":false,"groupdashboard":false,"type":"sakai","url":"/devwidgets/categories/categories.html","grouppages":false,"id":"categories","showinsakaigoodies":false,"description":"Categories Widget","hasSettings":true,"name":"Categories 
Widget","userpages":false,"i18n":{"default":"/devwidgets/categories/bundles/default.properties"},"personalportal":false,"userdashboard":false},"topnavigation":{"id":"topnavigation","enabled":true,"description":"topnavigation","name":"topnavigation","i18n":{"default":"/devwidgets/topnavigation/bundles/default.properties","en_US":"/devwidgets/topnavigation/bundles/en_US.properties","nl_NL":"/devwidgets/topnavigation/bundles/nl_NL.properties","zh_CN":"/devwidgets/topnavigation/bundles/zh_CN.properties"},"type":"core","url":"/devwidgets/topnavigation/topnavigation.html"},"tooltip":{"id":"tooltip","enabled":true,"hasSettings":false,"description":"Displays tooltip help dialog boxes","name":"Tooltip","i18n":{"default":"/devwidgets/tooltip/bundles/default.properties"},"type":"sakai","url":"/devwidgets/tooltip/tooltip.html"}}); \ No newline at end of file diff --git a/extensions/app/templates/cornell.html.footer.vm b/extensions/app/templates/cornell.html.footer.vm new file mode 100644 index 00000000..16bd8fe2 --- /dev/null +++ b/extensions/app/templates/cornell.html.footer.vm @@ -0,0 +1,15 @@ +
Formats
+json +RDF +
+
+#if ( $result["http://vivoweb.org/ontology/cu-vivo-osp#harvestedBy"] ) +
Harvested By
${result["http://vivoweb.org/ontology/cu-vivo-osp#harvestedBy"]}
+
+#end +#if ( $result["http://vivoweb.org/ontology/cu-vivo-osp#dateHarvested"] ) +
Harvested Date
${result["http://vivoweb.org/ontology/cu-vivo-osp#dateHarvested"]}
+
+#end + + diff --git a/extensions/app/templates/cornell.html.header.vm b/extensions/app/templates/cornell.html.header.vm new file mode 100644 index 00000000..3c0327c7 --- /dev/null +++ b/extensions/app/templates/cornell.html.header.vm @@ -0,0 +1,42 @@ + + + +${result.rdfs_label} + + + diff --git a/extensions/app/templates/cornell.html.macros.vm b/extensions/app/templates/cornell.html.macros.vm new file mode 100644 index 00000000..8046b489 --- /dev/null +++ b/extensions/app/templates/cornell.html.macros.vm @@ -0,0 +1,80 @@ +## +## +## +## Output a reference. +## +#macro( outputReference $ref ) + #if ( $ref.hasLabelAndKey() ) + ${ref.label} + #else + ${ref} + #end +#end +## +## +## +## Output a label +## +#macro( rdfs_label $value) +
${value}
+#end +## +## +## +## Dump an array +## +#macro( listArray $array ) + #foreach( $e in $array ) +
  • + #if ( $e.isReference() ) + #outputReference($e) + #elseif ( $e.entrySet() && $e.entrySet().size() && $e.entrySet().size() > 0 ) + +
      + #listRdfKeys($e) +
    + + #elseif ( $e.size() && $e.size() > 0 ) + +
      + #listArray($e) +
    + + #else + ${e} + #end +
  • + #end +#end +## +## +## +## Dump a map +## +#macro( listRdfKeys $keyset ) + #foreach( $e in $keyset.entrySet() ) + #if ( $e.getKey() == "rdfs_label" ) + #rdfs_label($e.getValue()) + #else +
  • ${e.getKey()} + #if ( $e.getValue().isReference() ) + #outputReference($e.getValue()) + #elseif ( $e.getValue().entrySet().size() > 0 ) + +
      + #listRdfKeys($e) +
    + + #elseif ( $e.getValue().size() > 0 ) + +
      + #listArray($e.getValue()) +
    + + #else + ${e.getValue()} + #end +
  • + #end + #end +#end \ No newline at end of file diff --git a/extensions/app/templates/cornell.html.vm b/extensions/app/templates/cornell.html.vm new file mode 100644 index 00000000..a35cd526 --- /dev/null +++ b/extensions/app/templates/cornell.html.vm @@ -0,0 +1,62 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") +
    Individual
    $!{result.vivocore_preferredTitle} ${result.rdfs_label}
    +
    +#if ($result.vivocore_personInPosition) +
    Position
    + #outputReference(${result.vivocore_personInPosition}) +
    +
    +#end +#if ($result.vivocore_researchOverview) +
    Overview
    ${result.vivocore_researchOverview}
    +
    +#end +#if ($result.vivocore_hasResearchArea) +
    Research Area
    #outputReference(${result.vivocore_hasResearchArea})
    +
    +#end +#if ($result.vivocore_hasPrincipalInvestigatorRole) +
    PI on Grants
    +
      + #foreach( $role in $result.vivocore_hasPrincipalInvestigatorRole ) +
    • #outputReference(${role})
    • + #end +
    +
    +
    +#end +#if ($result.vivocore_hasCo-PrincipalInvestigatorRole) + +
    Co PI on Grants
    +
      + #foreach( $role in $result.vivocore_hasCo-PrincipalInvestigatorRole ) +
    • #outputReference(${role})
    • + #end +
    +
    +
    +#end +#if ($result.vivocore_hasTeacherRole) +
    Teaching
    +
      + #foreach( $role in $result.vivocore_hasTeacherRole ) + #set( $roleSubject = $role.vivocore_roleIn.substring(13)) + #if ($roleSubject.startsWith("http")) +
    • ${roleSubject}
    • + #else +
    • ${roleSubject}
    • + #end + #end +
    +
    +
    +#end +#if ($result["http://vivo.library.cornell.edu/ns/0.1#professionalBackground"]) +
    Professional Background
    ${result["http://vivo.library.cornell.edu/ns/0.1#professionalBackground"]}
    + +
    +#end +
    Template
    Default
    +#parse("cornell.html.footer.vm") + diff --git a/extensions/app/templates/cornellOrganization.html.vm b/extensions/app/templates/cornellOrganization.html.vm new file mode 100644 index 00000000..5aa621b7 --- /dev/null +++ b/extensions/app/templates/cornellOrganization.html.vm @@ -0,0 +1,64 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") + +
    Organization
    ${result.rdfs_label}
    +
    +#if ($result["http://vivoweb.org/ontology/cu-vivo-osp#deptName"]) +
    Department Name
    +
      + #listArray($result["http://vivoweb.org/ontology/cu-vivo-osp#deptName"]) +
    +
    +
    +#end +#if ($result.vivocore_subcontractsGrant) +
    Subcontract Grants
    +
      + #listArray($result.vivocore_subcontractsGrant) +
    +
    +
    +#end +#if ($result.vivocore_awardsGrant) +
    Award Grants
    +
      + #listArray($result.vivocore_awardsGrant) +
    +
    +
    +#end +#if ($result["http://vivo.library.cornell.edu/ns/0.1#OrganizedEndeavorAddressesResearchArea"]) +
    Research Areas
    +
      + #listArray($result["http://vivo.library.cornell.edu/ns/0.1#OrganizedEndeavorAddressesResearchArea"]) +
    +
    +
    +#end +#if ($result["http://vivo.library.cornell.edu/ns/0.1#cornellOrganizedEndeavorHasLeadParticipantPerson"]) +
    Leader
    +
      + #outputReference($result["http://vivo.library.cornell.edu/ns/0.1#cornellOrganizedEndeavorHasLeadParticipantPerson"]) +
    +
    +
    +#end +#if ($result.vivocore_administers) +
    Administers Grants
    +
      + #listArray($result.vivocore_administers) +
    +
    +
    +#end +#if ($result.vivocore_organizationForPosition) +
    Associated Members
    +
      + #listArray($result.vivocore_organizationForPosition) +
    +
    +
    +#end +
    Template
    Organization
    +#parse("cornell.html.footer.vm") + diff --git a/extensions/app/templates/cornellResearchUnit.html.vm b/extensions/app/templates/cornellResearchUnit.html.vm new file mode 100644 index 00000000..b9c5aa63 --- /dev/null +++ b/extensions/app/templates/cornellResearchUnit.html.vm @@ -0,0 +1,65 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") + +
    Research Unit
    ${result.rdfs_label}
    +
    +
    Department Name
    ${result["http://vivoweb.org/ontology/cu-vivo-osp#deptName"]}
    +
    +#if ($result["http://vivo.library.cornell.edu/ns/0.1#OrganizedEndeavorAddressesResearchArea"]) +
    Research Areas
    +
      + #listArray($result["http://vivo.library.cornell.edu/ns/0.1#OrganizedEndeavorAddressesResearchArea"]) +
    +
    +
    +#end +#if ($result["http://vivo.library.cornell.edu/ns/0.1#cornellOrganizedEndeavorHasLeadParticipantPerson"]) +
    Leader
    +
      + #outputReference($result["http://vivo.library.cornell.edu/ns/0.1#cornellOrganizedEndeavorHasLeadParticipantPerson"]) +
    +
    +
    +#end +#if ($result.vivocore_administers) +
    Administers Grants
    +
      + #listArray($result.vivocore_administers) +
    +
    +
    +#end + +#if ($result.vivocore_organizationForPosition) +
    Associated Members
    +
      + #listArray($result.vivocore_organizationForPosition) +
    +
    +
    +#end +#if ($result["http://vivo.library.cornell.edu/ns/0.1#hasAffiliatedCornellFacultyMember"]) +
    Cornell Faculty Members
    +
      + #listArray($result["http://vivo.library.cornell.edu/ns/0.1#hasAffiliatedCornellFacultyMember"]) +
    +
    +
    +#end +#if ($result["http://vivo.library.cornell.edu/ns/0.1#hasAffiliatedAcademicStaffMember"]) +
    Academic Staff Members
    +
      + #listArray($result["http://vivo.library.cornell.edu/ns/0.1#hasAffiliatedCornellFacultyMember"]) +
    +
    +
    +#end +#if ($result.vivocore_freetextKeyword) +
    Keywords
    + ${result.vivocore_freetextKeyword} +
    +
    +#end +
    Template
    ResearchUnit
    +#parse("cornell.html.footer.vm") + diff --git a/extensions/app/templates/cornellfoaf_Organization.html.vm b/extensions/app/templates/cornellfoaf_Organization.html.vm new file mode 100644 index 00000000..c2cdabf1 --- /dev/null +++ b/extensions/app/templates/cornellfoaf_Organization.html.vm @@ -0,0 +1,32 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") + +
    Organization
    ${result.rdfs_label}
    +
    +#if ($result.vivocore_subcontractsGrant) +
    Subcontract Grants
    +
      + #listArray($result.vivocore_subcontractsGrant) +
    +
    +
    +#end +#if ($result.vivocore_awardsGrant) +
    Award Grants
    +
      + #listArray($result.vivocore_awardsGrant) +
    +
    +
    +#end +#if ($result.vivocore_subOrganizationWithin) +
    Parent Organizations
    +
      + #listArray($result.vivocore_subOrganizationWithin) +
    +
    +
    +#end +
    Template
    foaf_Organization
    +#parse("cornell.html.footer.vm") + diff --git a/extensions/app/templates/cornellvivocore_DateTimeInterval.html.vm b/extensions/app/templates/cornellvivocore_DateTimeInterval.html.vm new file mode 100644 index 00000000..45e96748 --- /dev/null +++ b/extensions/app/templates/cornellvivocore_DateTimeInterval.html.vm @@ -0,0 +1,16 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") +#if ($result.vivocore_start) +
    Start
    + #outputReference(${result.vivocore_start}) +
    +
    +#end +#if ($result.vivocore_end) +
    End
    + #outputReference(${result.vivocore_end}) +
    +
    +#end +
    Template
    vivocore_DateTimeInterval
    +#parse("cornell.html.footer.vm") diff --git a/extensions/app/templates/cornellvivocore_DateTimeValue.html.vm b/extensions/app/templates/cornellvivocore_DateTimeValue.html.vm new file mode 100644 index 00000000..cdadfb29 --- /dev/null +++ b/extensions/app/templates/cornellvivocore_DateTimeValue.html.vm @@ -0,0 +1,10 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") +#if ($result.vivocore_dateTime) +
    Date/Time
    + ${result.vivocore_dateTime} +
    +
    +#end +
    Template
    vivocore_DateTimeValue
    +#parse("cornell.html.footer.vm") diff --git a/extensions/app/templates/cornellvivocore_FacultyMember.html.vm b/extensions/app/templates/cornellvivocore_FacultyMember.html.vm new file mode 100644 index 00000000..3b17dc67 --- /dev/null +++ b/extensions/app/templates/cornellvivocore_FacultyMember.html.vm @@ -0,0 +1,62 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") +
    Faculty Member
    $!{result.vivocore_preferredTitle} ${result.rdfs_label}
    +
    +#if ($result.vivocore_personInPosition) +
    Position
    + #outputReference(${result.vivocore_personInPosition}) +
    +
    +#end +#if ($result.vivocore_researchOverview) +
    Overview
    ${result.vivocore_researchOverview}
    +
    +#end +#if ($result.vivocore_hasResearchArea) +
    Research Area
    #outputReference(${result.vivocore_hasResearchArea})
    +
    +#end +#if ($result.vivocore_hasPrincipalInvestigatorRole) +
    PI on Grants
    +
      + #foreach( $role in $result.vivocore_hasPrincipalInvestigatorRole ) +
    • #outputReference(${role})
    • + #end +
    +
    +
    +#end +#if ($result.vivocore_hasCo-PrincipalInvestigatorRole) + +
    Co PI on Grants
    +
      + #foreach( $role in $result.vivocore_hasCo-PrincipalInvestigatorRole ) +
    • #outputReference(${role})
    • + #end +
    +
    +
    +#end +#if ($result.vivocore_hasTeacherRole) +
    Teaching
    +
      + #foreach( $role in $result.vivocore_hasTeacherRole ) + #set( $roleSubject = $role.vivocore_roleIn) + #if ($roleSubject.startsWith("http")) +
    • ${roleSubject}
    • + #else +
    • ${roleSubject}
    • + #end + #end +
    +
    +#end +#if ($result["http://vivo.library.cornell.edu/ns/0.1#professionalBackground"]) +
    +
    Professional Background
    ${result["http://vivo.library.cornell.edu/ns/0.1#professionalBackground"]}
    + +
    +#end +
    Template
    vivocore_FacultyMember
    +#parse("cornell.html.footer.vm") + diff --git a/extensions/app/templates/cornellvivocore_Grant.html.vm b/extensions/app/templates/cornellvivocore_Grant.html.vm new file mode 100644 index 00000000..8ba5b77e --- /dev/null +++ b/extensions/app/templates/cornellvivocore_Grant.html.vm @@ -0,0 +1,44 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") +
    Grant Title
    ${result.rdfs_label}
    +
    +#if ($result.vivocore_grantAwardedBy) +
    Awarded By
    + #outputReference(${result.vivocore_grantAwardedBy}) +
    +
    +#end +#if ($result.vivocore_administeredBy) +
    Administered By
    + #outputReference(${result.vivocore_administeredBy}) +
    +
    +#end +#if ($result["http://vivoweb.org/ontology/cu-vivo-osp#sponsorLevelOne"]) +
    Sponsor Level One
    + ${result["http://vivoweb.org/ontology/cu-vivo-osp#sponsorLevelOne"]} +
    +
    +#end +#if ($result["http://vivoweb.org/ontology/cu-vivo-osp#sponsorLevelTwo"]) +
    Sponsor Level Two
    + ${result["http://vivoweb.org/ontology/cu-vivo-osp#sponsorLevelTwo"]} +
    +
    +#end +#if ($result["http://vivoweb.org/ontology/cu-vivo-osp#sponsorLevelThree"]) +
    Sponsor Level Three
    + ${result["http://vivoweb.org/ontology/cu-vivo-osp#sponsorLevelThree"]} +
    +
    +#end +#if ($result.vivocore_dateTimeInterval) +
    Date Period
    + #outputReference(${result.vivocore_dateTimeInterval}) +
    +
    +#end +
    Template
    vivocore_Grant
    +#parse("cornell.html.footer.vm") + + diff --git a/extensions/app/templates/cornellvivocore_Role.html.vm b/extensions/app/templates/cornellvivocore_Role.html.vm new file mode 100644 index 00000000..1c92a6bd --- /dev/null +++ b/extensions/app/templates/cornellvivocore_Role.html.vm @@ -0,0 +1,28 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") +
    Role
    +
    +
+#if ($result.vivocore_principalInvestigatorRoleOf) +Principal Investigator +#elseif ($result.vivocore_InvestigatorRole) +Investigator +#elseif ($result.vivocore_ResearcherRole) +Researcher +#else +Member +#end +
    +
    +
    Formats
    +json +RDF +
    +
    +#if ($result.vivocore_roleIn) +
    In
    + #outputReference(${result.vivocore_roleIn}) +
    +
    +#end +
    Template
    vivocore_Role
    +#parse("cornell.html.footer.vm") diff --git a/extensions/app/templates/cornellvivocore_SubjectArea.html.vm b/extensions/app/templates/cornellvivocore_SubjectArea.html.vm new file mode 100644 index 00000000..a1399963 --- /dev/null +++ b/extensions/app/templates/cornellvivocore_SubjectArea.html.vm @@ -0,0 +1,30 @@ +#parse("cornell.html.macros.vm") +#parse("cornell.html.header.vm") + +
    Subject Area
    ${result.rdfs_label}
    +
    +#if ($result["http://vivo.library.cornell.edu/ns/0.1#ResearchAreaSubResearchAreaOfResearchArea"]) +
    Sub Research Areas
    +
      + #listArray($result["http://vivo.library.cornell.edu/ns/0.1#ResearchAreaSubResearchAreaOfResearchArea"]) +
    +
    +
    +#end +#if ($result["http://vivo.library.cornell.edu/ns/0.1#ResearchAreaAddressedByOrganizedEndeavor"]) +
    Research Areas Addressed
    +
      + #listArray($result["http://vivo.library.cornell.edu/ns/0.1#ResearchAreaAddressedByOrganizedEndeavor"]) +
    +
    +
    +#end +#if ($result.vivocore_partOf) +
    Part Of
    + #outputReference($result.vivocore_partOf) +
    +
    +#end +
    Template
    vivocore_SubjectArea
    +#parse("cornell.html.footer.vm") + diff --git a/extensions/app/templates/vivoprofile.vm b/extensions/app/templates/vivoprofile.vm new file mode 100644 index 00000000..ff419161 --- /dev/null +++ b/extensions/app/templates/vivoprofile.vm @@ -0,0 +1,14 @@ + +
    +
    Profile of
    ${result.rdfs_label}
    +
    Title
    ${result.vivocore_preferredTitle}
    +
    LastName
    ${result.foaf_lastName}
    +
    FirstName
    ${result.foaf_firstName}
    +
    Primary Email
    ${result.vivocore_primaryEmail}
    +
    Role
    ${result.vitro_mostSpecificType}
    +
    Research Area
    ${result.vivocore_hasResearchArea.rdfs_label}
    +
    Position
    ${result.vivocore_personInPosition.rdfs_label}
    +
    Organization
    ${result.vivocore_personInPosition.vivocore_positionInOrganization.rdfs_label}
    +
    Position Type
    ${result.vivocore_personInPosition.vitro_mostSpecificType}
    + + diff --git a/extensions/http/pom.xml b/extensions/http/pom.xml new file mode 100644 index 00000000..be766111 --- /dev/null +++ b/extensions/http/pom.xml @@ -0,0 +1,112 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + uk.co.tfd.sm.http + bundle + 0.1-SNAPSHOT + Sparse Map :: Http Operations + Provides Http support. + + + + org.apache.felix + maven-scr-plugin + + + org.apache.felix + maven-bundle-plugin + true + + + sparse-map + uk.co.tfd.sm.api.http.* + uk.co.tfd.sm.http.* + + com.google.common.collect; version="9.0.0", + * + + true + + + + + + + + + org.slf4j + slf4j-simple + 1.5.10 + + + org.apache.felix + org.apache.felix.scr.annotations + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + junit + junit + 4.4 + jar + compile + + + javax.servlet + servlet-api + 2.4 + jar + compile + + + org.mockito + mockito-all + 1.8.5 + jar + test + + + com.google.code.gson + gson + 1.7.1 + jar + compile + + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + compile + + + + org.sakaiproject.nakamura + uk.co.tfd.sm.template + 0.1-SNAPSHOT + compile + + + org.sakaiproject.nakamura + uk.co.tfd.sm.memory + 0.1-SNAPSHOT + compile + + + org.sakaiproject.nakamura + uk.co.tfd.sm.jetty + 0.1-SNAPSHOT + compile + + + diff --git a/extensions/http/pom.xml.safe b/extensions/http/pom.xml.safe new file mode 100644 index 00000000..45e793a6 --- /dev/null +++ b/extensions/http/pom.xml.safe @@ -0,0 +1,158 @@ + + + + 4.0.0 + + org.sakaiproject.nakamura + base + 1.1-SNAPSHOT + ../../pom.xml + + org.sakaiproject.nakamura.batch + bundle + Sakai Nakamura :: Batch Bundle + 1.1-SNAPSHOT + Batch operations such as batch GET, POST, DELETE and tree... 
+ + + + org.apache.felix + maven-scr-plugin + + + org.apache.felix + maven-bundle-plugin + true + + + sakai-nakamura + org.sakaiproject.nakamura.batch.* + true + + + + + + + + javax.servlet + servlet-api + 2.5 + provided + + + javax.jcr + jcr + + + org.slf4j + slf4j-api + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.utils + 1.1-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.templates + 1.1-SNAPSHOT + + + org.apache.sling + org.apache.sling.commons.osgi + 2.0.4-incubator + compile + + + org.apache.felix + org.osgi.compendium + + + junit + junit + + + org.easymock + easymock + + + org.slf4j + slf4j-simple + + + org.apache.jackrabbit + jackrabbit-api + 2.0.0 + test + + + org.apache.sling + org.apache.sling.servlets.post + 2.0.4-incubator + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.resource + 1.1-SNAPSHOT + provided + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.tika + 0.9-1.1-SNAPSHOT + provided + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.memory + 1.1-SNAPSHOT + provided + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.doc + 1.1-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.jetty-config + 1.1-SNAPSHOT + + + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.testutils + 1.1-SNAPSHOT + test + + + org.apache.sling + org.apache.sling.commons.testing + 2.0.5-20100823 + test + + + org.apache.sling + org.apache.sling.fsresource + 1.0.0 + test + + + diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/api/http/CSRFProtectionService.java b/extensions/http/src/main/java/uk/co/tfd/sm/api/http/CSRFProtectionService.java new file mode 100644 index 00000000..5dc44e37 --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/api/http/CSRFProtectionService.java @@ -0,0 +1,23 @@ +package uk.co.tfd.sm.api.http; + +import javax.servlet.http.HttpServletRequest; + +/** + * This service provides 
Cross Site Request Forgery protection tokens. Those tokens + * may be checked by a filter or something else to ensure that posts are being performed + * by genuine clients + * @author ieb + * + */ +public interface CSRFProtectionService { + + /** + * Get a Token based on the request, should take into account at least the + * host where the request is being made. The token should also be relatively + * secure and not leaked unnecessarily. Ideally the token will timeout. + * @param request + * @return + */ + String getCSRFToken(HttpServletRequest request); + +} diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/api/http/IdentityRedirectService.java b/extensions/http/src/main/java/uk/co/tfd/sm/api/http/IdentityRedirectService.java new file mode 100644 index 00000000..3fc7942f --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/api/http/IdentityRedirectService.java @@ -0,0 +1,39 @@ +package uk.co.tfd.sm.api.http; + +import javax.servlet.http.HttpServletRequest; + +/** + * Provides a mechanism to transfer identity from one host to annother with a + * short lived token. + * + * @author ieb + * + */ +public interface IdentityRedirectService { + + /** + * Extract the identity from the request. + * + * @param request + * the inbound request that should have been created by + * getRedirectIdentityUrl. + * @return the identity that was transfered or null if there was no identity + * in the request. + */ + String getIdentity(HttpServletRequest request); + + /** + * Get a redirect with identity + * + * @param request + * the request that needs to be transfered. It will be a GET + * request and it will be transfered unchanged to the target + * host. + * @param identity + * the identity to transfer. + * @return the url of the redirection or null if there is no target. The URL + * will include additional parameters to perform the transfer. 
+ */ + String getRedirectIdentityUrl(HttpServletRequest request, String identity); + +} diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/api/http/ServerProtectionService.java b/extensions/http/src/main/java/uk/co/tfd/sm/api/http/ServerProtectionService.java new file mode 100644 index 00000000..05fbfd6b --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/api/http/ServerProtectionService.java @@ -0,0 +1,22 @@ +package uk.co.tfd.sm.api.http; + +import javax.servlet.http.HttpServletRequest; + +/** + * A service to protect the server from rogue requests. It should prevent + * certain types of request from certain hosts, and may validate the tokens of + * other parts of the request. It should only use information in the request and + * not try and resolve the request into a resource. + * + * @author ieb + * + */ +public interface ServerProtectionService { + + public enum Action { + OK(), FORBID(); + } + + public Action checkAction(HttpServletRequest request); + +} diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/http/ServerProtectionFilter.java b/extensions/http/src/main/java/uk/co/tfd/sm/http/ServerProtectionFilter.java new file mode 100644 index 00000000..d72c9596 --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/http/ServerProtectionFilter.java @@ -0,0 +1,69 @@ +package uk.co.tfd.sm.http; + +import java.io.IOException; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Properties; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.http.ServerProtectionService; +import uk.co.tfd.sm.api.http.ServerProtectionService.Action; + +/** + * Performs server protection for user content based on the URL alone. Filtering + * of user content is performed in the place where user content is served from. + * + * @author ieb + * + */ +@Component(immediate = true, metatype = true) +@Service(value = Filter.class) +@Properties(value={ + @Property(name="pattern", value="/.*") +}) +public class ServerProtectionFilter implements Filter { + + private static final Logger LOGGER = LoggerFactory.getLogger(ServerProtectionFilter.class); + @Reference + private ServerProtectionService serverProtectionService; + + @Override + public void init(FilterConfig filterConfig) throws ServletException { + } + + @Override + public void doFilter(ServletRequest request, ServletResponse response, + FilterChain chain) throws IOException, ServletException { + Action ac = serverProtectionService + .checkAction((HttpServletRequest) request); + switch (ac) { + case OK: + LOGGER.debug("OK"); + chain.doFilter(request, response); + break; + case FORBID: + LOGGER.debug("Forbid"); + ((HttpServletResponse) response) + .sendError(HttpServletResponse.SC_FORBIDDEN); + break; + } + } + + @Override + public void destroy() { + } + +} diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/http/ServerProtectionServiceImpl.java b/extensions/http/src/main/java/uk/co/tfd/sm/http/ServerProtectionServiceImpl.java new file mode 100644 index 00000000..f5157e67 --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/http/ServerProtectionServiceImpl.java @@ -0,0 +1,469 @@ +package uk.co.tfd.sm.http; + +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.security.InvalidKeyException; +import java.security.Key; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Enumeration; +import java.util.List; +import java.util.Map; 
+import java.util.Map.Entry; +import java.util.Set; + +import javax.crypto.Mac; +import javax.crypto.spec.SecretKeySpec; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang.StringUtils; +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Modified; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Service; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.http.CSRFProtectionService; +import uk.co.tfd.sm.api.http.IdentityRedirectService; +import uk.co.tfd.sm.api.http.ServerProtectionService; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; + +/** + * Implements both the ServerProtectionService and a CSRFProtectionService. How + * each host is protected + * + * @author ieb + * + */ +@Component(immediate = false, metatype=true) +@Service(value = { ServerProtectionService.class, CSRFProtectionService.class, + IdentityRedirectService.class }) +public class ServerProtectionServiceImpl implements ServerProtectionService, + CSRFProtectionService, IdentityRedirectService { + + // only put urls that have to be excluded here. 
+ @Property(value = { + "/system/console" + }) + private static final String WHITELIST = "whitelist"; + @Property(value = "secret, change in production") + private static final String SECRET = "secret"; + private static final String UTF_8 = "UTF-8"; + private static final String SHA_256 = "SHA-256"; + private static final String HMAC_SHA256 = "HmacSHA256"; + @Property(value = { "referer;localhost:8080;http://localhost:8080/", + "csrf;localhost:8080;", "usercontent;localhost:8082;", + "redirect;localhost:8080;http://localhost:8082" }) + private static final String HOSTS = "hosts"; + + public interface HostType { + + boolean requestSafe(HttpServletRequest request); + + } + + private static final Set SAFE_METHODS = ImmutableSet.of("GET", + "HEAD", "OPTIONS", "PROPGET"); + + /** + * Redirect host types redirect from one host to another. + * + * @author ieb + * + */ + public class RedirectHostType implements HostType { + + private String target; + + public RedirectHostType(String host, String target) { + this.target = target; + } + + @Override + public boolean requestSafe(HttpServletRequest request) { + return false; + } + + public String getRedirectTarget(HttpServletRequest request, + String userId) { + // convert the request URL into a target url replacing the protocol, + // host, port part with the target, and add a user transfer + String requestURI = request.getRequestURI(); + String queryString = request.getQueryString(); + String hmac = getRequestHmac(requestURI, userId); + try { + if (queryString == null) { + queryString = "_hmac=" + URLEncoder.encode(hmac, UTF_8); + } else { + queryString = queryString + "&_hmac=" + + URLEncoder.encode(hmac, UTF_8); + } + } catch (UnsupportedEncodingException e) { + LOGGER.debug(" No UTF-8 Support, check you JVM, its broken"); + } + return target + requestURI + "?" + queryString; + } + + } + + /** + * User Content host types are only safe for GET operations. 
+ * + * @author ieb + * + */ + public class UserContentHostType implements HostType { + + + public UserContentHostType(String host) { + } + + @Override + public boolean requestSafe(HttpServletRequest request) { + return SAFE_METHODS.contains(request.getMethod()); + } + + + public String getTransferUserId(HttpServletRequest request) { + return checkRequestHmac(request); + } + + } + + public class CSRFHostType implements HostType { + + private String host; + + public CSRFHostType(String host) { + this.host = host; + } + + @Override + public boolean requestSafe(HttpServletRequest request) { + if (SAFE_METHODS.contains(request.getMethod())) { + return true; + } + return checkCSRFToken(request, host); + } + + public String getToken(HttpServletRequest request) { + return createCSRFToken(request, host); + } + + } + + public class RefererHostType implements HostType { + + private String safeRefererStub; + + public RefererHostType(String host, String safeRefererStub) { + this.safeRefererStub = safeRefererStub; + } + + public boolean requestSafe(HttpServletRequest request) { + if (SAFE_METHODS.contains(request.getMethod())) { + return true; + } + if (safeRefererStub != null) { + String referer = getReferer(request); + if (referer != null) { + if (referer.startsWith("/") + || referer.startsWith(safeRefererStub)) { + return true; + } + } + } + return false; + } + + private String getReferer(HttpServletRequest request) { + @SuppressWarnings("unchecked") + Enumeration referers = request.getHeaders("Referer"); + String referer = null; + if (referers == null || !referers.hasMoreElements()) { + LOGGER.debug("No Referer header present "); + return null; + } + referer = referers.nextElement(); + if (referer == null) { + LOGGER.debug("No Referer header present, was null "); + } + return referer; + } + + } + + private static final Logger LOGGER = LoggerFactory + .getLogger(ServerProtectionServiceImpl.class); + private Map hosts; + private Key[] transferKeys; + private String[] 
whitelist; + + @Activate + public void activate(Map properties) throws NoSuchAlgorithmException, UnsupportedEncodingException { + modified(properties); + } + + @Modified + public void modified(Map properties) + throws NoSuchAlgorithmException, UnsupportedEncodingException { + Map> ht = Maps.newHashMap(); + String[] hostsConfig = toStringArray(properties.get(HOSTS)); + if (hostsConfig != null) { + for (String host : hostsConfig) { + String[] hostConfig = StringUtils.split(host, ";"); + List htl = ht.get(hostConfig[1]); + if (htl == null) { + htl = Lists.newArrayList(); + ht.put(hostConfig[1], htl); + } + // format is type;host;config + if ("referer".equals(hostConfig[0])) { + htl.add(new RefererHostType(hostConfig[1], hostConfig[2])); + } else if ("csrf".equals(hostConfig[0])) { + htl.add(new CSRFHostType(hostConfig[1])); + } else if ("usercontent".equals(hostConfig[0])) { + htl.add(new UserContentHostType(hostConfig[1])); + } else if ("redirect".equals(hostConfig[0])) { + htl.add(new RedirectHostType(hostConfig[1], hostConfig[2])); + } else { + LOGGER.warn("Uknown host config type {} ", host); + } + } + } + Builder b = ImmutableMap.builder(); + for (Entry> e : ht.entrySet()) { + if (e.getValue().size() > 0) { + b.put(e.getKey(), + e.getValue().toArray(new HostType[e.getValue().size()])); + } + } + hosts = b.build(); + + String transferSharedSecret = (String) properties.get(SECRET); + transferKeys = new Key[10]; + MessageDigest md = MessageDigest.getInstance(SHA_256); + Base64 encoder = new Base64(true); + byte[] input = transferSharedSecret.getBytes(UTF_8); + // create a static ring of 10 keys by repeatedly hashing the last key + // seed + // starting with the transferSharedSecret + for (int i = 0; i < transferKeys.length; i++) { + md.reset(); + byte[] data = md.digest(input); + transferKeys[i] = new SecretKeySpec(data, HMAC_SHA256); + input = encoder.encode(data); + } + + whitelist = toStringArray(properties.get(WHITELIST)); + + } + + private String[] 
toStringArray(Object object) { + if ( object instanceof String[] ) { + return (String[]) object; + } + return new String[]{ String.valueOf(object) }; + } + + protected String getRequestHmac(String requestURI, String userId) { + long expires = System.currentTimeMillis() + 600000L; + return hash(requestURI, userId, expires); + } + + protected String checkRequestHmac(HttpServletRequest request) { + String p = request.getParameter("_hmac"); + if (p == null) { + LOGGER.debug("No token"); + return null; + } + String[] parts = StringUtils.split(p, ";"); + if (parts == null || parts.length < 3) { + LOGGER.debug("too short {} ",p); + return null; + } + long expires = Long.parseLong(parts[1]); + if (System.currentTimeMillis() > expires) { + LOGGER.debug("Expired {} ",p); + return null; + } + if (p.equals(hash(request.getRequestURI(), parts[2], expires))) { + LOGGER.debug("Expired, bad hash {} {} ",p,hash(request.getRequestURI(), "", expires)); + return parts[2]; + } + return null; + } + + protected String createCSRFToken(HttpServletRequest request, String host) { + long expires = System.currentTimeMillis() + 36000000L; + return hash(host, "", expires); + } + + protected boolean checkCSRFToken(HttpServletRequest request, String host) { + String p = request.getParameter("_csrft"); + if (p == null) { + LOGGER.debug("No token"); + return false; + } + String[] parts = StringUtils.split(p, ";"); + if (parts == null || parts.length < 2) { + LOGGER.debug("Invalid Token, too short {} ",p); + return false; + } + long expires = Long.parseLong(parts[1]); + if (System.currentTimeMillis() > expires) { + LOGGER.debug("Expired, too short {} ",p); + return false; + } + + if ( ! 
p.equals(hash(host, "", expires) )) { + LOGGER.debug("Expired, bad hash {} {} ",p,hash(host, "", expires)); + return false; + } + return true; + } + + private String hash(String privatePayload, String publicPayload, long expires) { + try { + int keyIndex = (int) (expires - ((expires / 10) * 10)); + Mac m = Mac.getInstance(HMAC_SHA256); + m.init(transferKeys[keyIndex]); + + String message = privatePayload + publicPayload + expires; + m.update(message.getBytes(UTF_8)); + StringBuilder sb = new StringBuilder(); + sb.append(new String(Base64.encodeBase64URLSafe(m.doFinal()))); + sb.append(";"); + sb.append(expires); + sb.append(";"); + sb.append(publicPayload); + LOGGER.debug("Hashing {} to {} ",message, sb.toString()); + return sb.toString(); + } catch (NoSuchAlgorithmException e) { + LOGGER.error("Unable to hash token, please check JVM for SHA256"); + } catch (UnsupportedEncodingException e) { + LOGGER.error("No UTF-8 char set, check JVM"); + } catch (InvalidKeyException e) { + LOGGER.error("Invlid Key used in hash"); + } + return null; + } + + private boolean inWhitelist(HttpServletRequest request) { + String requestURI = request.getRequestURI(); + for (String openUrl : whitelist) { + if ( requestURI.startsWith(openUrl)) { + return true; + } + } + return false; + } + + + private String buildTrustedHostHeader(HttpServletRequest request) { + // try the host header first + String host = request.getHeader("Host"); + if (host != null && host.trim().length() > 0) { + LOGGER.debug("Host header taknen from http host header as [{}]", + host); + return host; + } + // if not suitable resort to letting jetty build the host header + int port = request.getServerPort(); + String scheme = request.getScheme(); + String serverName = request.getServerName(); + // default ports are not added to the header. 
+ if ((port == 80 && "http".equals(scheme)) + || (port == 443 && "https".equals(scheme))) { + LOGGER.debug( + "Host header not present, constructed assuming default port from request [{}]", + serverName); + return serverName; + } else { + LOGGER.debug("Host header not present, from request [{}:{}]", + serverName, port); + return serverName + ":" + port; + } + } + + @Override + public Action checkAction(HttpServletRequest request) { + if ( inWhitelist(request)) { + return Action.OK; + } + // get the host + String hostHeader = buildTrustedHostHeader(request); + // check the host + HostType[] htl = hosts.get(hostHeader); + if (htl != null) { + for (HostType host : htl) { + if (host.requestSafe(request)) { + return Action.OK; + } + } + } + return Action.FORBID; + } + + + @Override + public String getCSRFToken(HttpServletRequest request) { + if ( inWhitelist(request)) { + return null; + } + String hostHeader = buildTrustedHostHeader(request); + HostType[] htl = hosts.get(hostHeader); + if (htl != null) { + for (HostType host : htl) { + if (host instanceof CSRFHostType) { + return ((CSRFHostType) host).getToken(request); + } + } + } + return null; + } + + @Override + public String getRedirectIdentityUrl(HttpServletRequest request, + String userId) { + if ("GET".equals(request.getMethod())) { + String hostHeader = buildTrustedHostHeader(request); + HostType[] htl = hosts.get(hostHeader); + if (htl != null) { + for (HostType host : htl) { + if (host instanceof RedirectHostType) { + return ((RedirectHostType) host).getRedirectTarget( + request, userId); + } + } + } + } + return null; + } + + @Override + public String getIdentity(HttpServletRequest request) { + if ("GET".equals(request.getMethod())) { + String hostHeader = buildTrustedHostHeader(request); + HostType[] htl = hosts.get(hostHeader); + if (htl != null) { + for (HostType host : htl) { + if (host instanceof UserContentHostType) { + return ((UserContentHostType) host) + .getTransferUserId(request); + } + } + } + } 
+ return null; + } + +} diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/BatchProcessor.java b/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/BatchProcessor.java new file mode 100644 index 00000000..1de8c398 --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/BatchProcessor.java @@ -0,0 +1,205 @@ +package uk.co.tfd.sm.http.batch; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.MalformedURLException; +import java.security.MessageDigest; +import java.util.Dictionary; +import java.util.Enumeration; + +import javax.servlet.RequestDispatcher; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.codec.binary.Base64; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.memory.Cache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.gson.stream.JsonWriter; + +public class BatchProcessor { + + private static final Logger LOGGER = LoggerFactory + .getLogger(BatchProcessor.class); + private Cache responseCache; + + public BatchProcessor(Cache cache) { + this.responseCache = cache; + } + + /** + * Takes the original request and starts the batching. 
+ * + * @param request + * @param response + * @throws IOException + * @throws ServletException + */ + protected void batchRequest(HttpServletRequest request, + HttpServletResponse response, String jsonRequest, + boolean allowModify) throws IOException, ServletException { + JsonParser jsonParser = new JsonParser(); + JsonElement element = jsonParser.parse(jsonRequest); + if (!element.isJsonArray()) { + response.sendError(HttpServletResponse.SC_BAD_REQUEST, + "Failed to parse the requests parameter"); + return; + } + + JsonArray arr = element.getAsJsonArray(); + response.setContentType("application/json"); + response.setCharacterEncoding("UTF-8"); + + String key = null; + try { + MessageDigest md = MessageDigest.getInstance("SHA-1"); + key = Base64.encodeBase64URLSafeString(md.digest(jsonRequest + .getBytes("UTF-8"))); + String cachedResult = responseCache.get(key); + if (cachedResult != null) { + LOGGER.debug("Using Cache"); + response.getWriter().write(cachedResult); + return; + } + } catch (Exception e) { + LOGGER.warn(e.getMessage(), e); + } + + boolean cache = (key != null); + CaptureResponseWriter captureResponseWriter = new CaptureResponseWriter( + response.getWriter()); + JsonWriter write = new JsonWriter(captureResponseWriter); + write.beginObject(); + write.name("results"); + write.beginArray(); + for (int i = 0; i < arr.size(); i++) { + JsonObject obj = arr.get(i).getAsJsonObject(); + try { + RequestInfo r = new RequestInfo(obj); + if (r.isValid() && (allowModify || r.isSafe())) { + cache = doRequest(request, response, r, write) && cache; + } else { + outputFailure("Bad request, ignored " + obj.toString(), + write); + } + } catch (MalformedURLException e) { + outputFailure("Bad request, ignored " + obj.toString(), write); + } + } + write.endArray(); + write.endObject(); + write.flush(); + if (cache) { + responseCache.put(key, captureResponseWriter.toString()); + } + } + + private void outputFailure(String message, JsonWriter write) + throws IOException { 
+ write.beginObject(); + write.name("success"); + write.value(false); + write.name("status"); + write.value(400); + write.name("message"); + write.value(message); + write.endObject(); + } + + private boolean doRequest(HttpServletRequest request, + HttpServletResponse response, RequestInfo requestInfo, + JsonWriter write) throws ServletException, IOException { + + boolean cache = true; + if (!"GET".equals(requestInfo.getMethod())) { + cache = false; + String user = request.getRemoteUser(); + + if (user == null || user.length() == 0 + || User.ANON_USER.equals(request.getRemoteUser())) { + response.reset(); + throw new ServletException( + "Anon Users may only perform GET operations"); + } + } + String requestPath = requestInfo.getUrl(); + + // Wrap the request and response. + RequestWrapper requestWrapper = new RequestWrapper(request, requestInfo); + ResponseWrapper responseWrapper = new ResponseWrapper(response); + RequestDispatcher requestDispatcher; + try { + requestDispatcher = request.getRequestDispatcher(requestPath); + requestDispatcher.forward(requestWrapper, responseWrapper); + cache = writeResponse(write, responseWrapper, requestInfo) && cache; + } catch (ServletException e) { + writeFailedRequest(write, requestInfo); + cache = false; + } catch (IOException e) { + writeFailedRequest(write, requestInfo); + cache = false; + } + + return cache; + + } + + private boolean writeResponse(JsonWriter write, + ResponseWrapper responseWrapper, RequestInfo requestData) + throws IOException { + boolean cache = true; + try { + String body = responseWrapper.getDataAsString(); + write.beginObject(); + write.name("url"); + write.value(requestData.getUrl()); + write.name("success"); + write.value(true); + write.name("body"); + write.value(body); + write.name("status"); + write.value(responseWrapper.getResponseStatus()); + write.name("statusmessage"); + write.value(responseWrapper.getResponseStatusMessage()); + write.name("headers"); + write.beginObject(); + Dictionary headers 
= responseWrapper + .getResponseHeaders(); + Enumeration keys = headers.keys(); + while (keys.hasMoreElements()) { + String k = keys.nextElement(); + if ("cache-control".equalsIgnoreCase(k) + && (headers.get(k).contains("private") || headers + .get(k).contains("no-cache"))) { + cache = false; + } + write.name(k); + write.value(headers.get(k)); + } + write.endObject(); + write.endObject(); + } catch (UnsupportedEncodingException e) { + writeFailedRequest(write, requestData); + cache = false; + } + return cache; + } + + private void writeFailedRequest(JsonWriter write, RequestInfo requestData) + throws IOException { + write.beginObject(); + write.name("url"); + write.value(requestData.getUrl()); + write.name("success"); + write.value(false); + write.endObject(); + } + +} diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/BatchServlet.java b/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/BatchServlet.java new file mode 100644 index 00000000..a0886b9c --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/BatchServlet.java @@ -0,0 +1,127 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package uk.co.tfd.sm.http.batch; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.StringWriter; +import java.util.Map; + +import javax.servlet.Servlet; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.memory.Cache; +import org.sakaiproject.nakamura.api.memory.CacheManagerService; +import org.sakaiproject.nakamura.api.memory.CacheScope; + +import uk.co.tfd.sm.api.template.TemplateService; + +@Component(immediate = true, metatype = true) +@Service(value = Servlet.class) +@Property(name = "alias", value = "/system/batch") +public class BatchServlet extends HttpServlet { + + private static final String REQUEST_TEMPLATE = "t"; + + private static final long serialVersionUID = 419598445499567027L; + + protected static final String REQUESTS_PARAMETER = "requests"; + + private BatchProcessor batchProcessor; + + @Reference + protected TemplateService templateService; + + @Reference + protected CacheManagerService cacheManagerService; + + + @Activate + public void activate(Map properties) throws FileNotFoundException, IOException { + Cache cache = cacheManagerService.getCache(BatchProcessor.class.getName(), CacheScope.INSTANCE); + batchProcessor = new BatchProcessor(cache); + + } + + @Override + protected void doGet(HttpServletRequest request, + HttpServletResponse response) throws ServletException, IOException { + + batchRequest(request, response, false); + } + + @Override + protected void doPost(HttpServletRequest request, + HttpServletResponse response) throws ServletException, IOException { + batchRequest(request, 
response, true); + } + + @Override + protected void doDelete(HttpServletRequest request, + HttpServletResponse response) throws ServletException, IOException { + response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); + } + + /** + * {@inheritDoc} + * + * @see org.apache.sling.api.servlets.SlingAllMethodsServlet#doPut(org.apache.sling.api.SlingHttpServletRequest, + * org.apache.sling.api.SlingHttpServletResponse) + */ + @Override + protected void doPut(HttpServletRequest request, + HttpServletResponse response) throws ServletException, IOException { + response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); + } + + /** + * Takes the original request and starts the batching. + * + * @param request + * @param response + * @throws IOException + * @throws ServletException + */ + @SuppressWarnings("unchecked") + protected void batchRequest(HttpServletRequest request, + HttpServletResponse response, boolean allowModify) throws IOException, ServletException { + String json = request.getParameter(REQUESTS_PARAMETER); + String template = request.getParameter(REQUEST_TEMPLATE); + if ( template != null && template.length() > 0 ) { + if ( templateService.checkTemplateExists(template)) { + StringWriter processedTemplate = new StringWriter(); + templateService.process(request.getParameterMap(), "UTF-8", processedTemplate, template); + json = processedTemplate.toString(); + } else { + response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Template specified in request parameter t does not exist"); + } + } + + + batchProcessor.batchRequest(request, response, json, allowModify); + } + +} diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/CaptureResponseWriter.java b/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/CaptureResponseWriter.java new file mode 100644 index 00000000..64d561d4 --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/CaptureResponseWriter.java @@ -0,0 +1,44 @@ +package uk.co.tfd.sm.http.batch; + +import 
java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; + +public class CaptureResponseWriter extends Writer { + + private PrintWriter writer; + private StringWriter internalWriter; + + public CaptureResponseWriter(PrintWriter writer) { + this.writer = writer; + this.internalWriter = new StringWriter(); + } + + @Override + public void close() throws IOException { + this.writer.close(); + this.internalWriter.close(); + + } + + @Override + public void flush() throws IOException { + this.writer.flush(); + this.internalWriter.flush(); + } + + @Override + public void write(char[] buff, int off, int len) throws IOException { + writer.write(buff,off,len); + this.internalWriter.write(buff, off, len); + } + + @Override + public String toString() { + return this.internalWriter.toString(); + } + + + +} diff --git a/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/RequestInfo.java b/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/RequestInfo.java new file mode 100644 index 00000000..ccd98ae4 --- /dev/null +++ b/extensions/http/src/main/java/uk/co/tfd/sm/http/batch/RequestInfo.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ + +package uk.co.tfd.sm.http.batch; + + +import java.net.MalformedURLException; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.ImmutableSet; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +/** + * Class used to hold information about a request. eg: type, parameters, url.. + */ +public class RequestInfo { + + private static final String ALLOWED_URL_CHARS = "$-_.+!*'(),/?&:;=@% ~^"; + private static final Set VALID_METHODS = ImmutableSet.of("GET","POST","PUT","DELETE","OPTIONS","HEAD"); + private static final Set VALID_SAFE_METHODS = ImmutableSet.of("GET","HEAD"); + private String url; + private String method; + private Map parameters; + + public RequestInfo(String url, Map parameters) throws MalformedURLException { + setUrl(url); + parameters = ImmutableMap.copyOf(parameters); + } + + /** + * Set a default requestinfo object. + */ + public RequestInfo() { + } + + /** + * Get a RequestInfo object created from a JSON block. This json object has to be in the + * form of + * + *
    +   * [
    +   * {
    +   *   "url" : "/foo/bar",
    +   *   "method" : "POST",
    +   *   "parameters\" : {
    +   *     "val" : 123,
    +   *     "val@TypeHint" : "Long"
    +   *   }
    +   * },
    +   * {
    +   *   "url" : "/_user/a/ad/admin/public/authprofile.json",
    +   *   "method" : "GET"
    +   * }
    +   * ]
    +   * 
    + * + * @param obj + * The JSON object containing the information to base this RequestInfo on. + * @throws JSONException + * The JSON object could not be interpreted correctly. + * @throws MalformedURLException + */ + public RequestInfo(JsonObject obj) throws MalformedURLException { + if ( obj.has("url") ) { + setUrl(obj.get("url").getAsString()); + } + if ( obj.has("method")) { + setMethod(obj.get("method").getAsString()); + } else { + method = "GET"; + } + + Builder builder = ImmutableMap.builder(); + if (obj.has("parameters")) { + + JsonObject data = obj.get("parameters").getAsJsonObject(); + + for ( Entry e : data.entrySet()) { + String k = e.getKey(); + JsonElement val = e.getValue(); + if (val.isJsonArray()) { + JsonArray arr = val.getAsJsonArray(); + String[] par = new String[arr.size()]; + for (int i = 0; i < arr.size(); i++) { + par[i] = arr.get(i).getAsString(); + } + builder.put(k, par); + } else { + String[] par = { val.toString() }; + builder.put(k, par); + } + } + } + parameters = builder.build(); + + } + + /** + * @param url + * The url where to fire a request on. + * @throws MalformedURLException + */ + public void setUrl(String url) throws MalformedURLException { + checkValidUrl(url); + this.url = url; + } + + private void checkValidUrl(String url) throws MalformedURLException { + for( char c : url.toCharArray()) { + if ( !Character.isLetterOrDigit(c)) { + if ( ALLOWED_URL_CHARS.indexOf(c) < 0 ) { + throw new MalformedURLException("Invalid Character in URL request "+url+" character was 0x"+Integer.toHexString(c)); + } + } + } + } + + /** + * @return The to fire a request to. 
package uk.co.tfd.sm.http.batch;

import java.util.Enumeration;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;

import org.apache.commons.lang.StringUtils;

import com.google.common.collect.Iterators;

/**
 * Wraps the original batch request, overriding the method, path, query string
 * and parameters with the values from a single {@link RequestInfo} entry, so
 * one child request of a batch can be dispatched as if it had arrived on its
 * own.
 */
public class RequestWrapper extends HttpServletRequestWrapper {

	private Map<String, String[]> parameters;
	private String method;
	private String url;
	private String path;
	private String query;

	public RequestWrapper(HttpServletRequest request, RequestInfo requestInfo) {
		super(request);
		parameters = requestInfo.getParameters();
		method = requestInfo.getMethod();
		if (method == null) {
			method = "GET";
		}
		url = requestInfo.getUrl();
		// Separate the url into its path and query-string components.
		String[] urlParts = StringUtils.split(url, "?", 2);
		switch (urlParts.length) {
		case 0:
			path = null;
			query = null;
			break;
		case 1:
			path = urlParts[0];
			query = null;
			break;
		default:
			path = urlParts[0];
			query = urlParts[1];
			break;
		}
	}

	//
	// Overridden request parameters
	//

	@Override
	public String getParameter(String name) {
		String[] values = parameters.get(name);
		if (values == null || values.length == 0) {
			return null;
		}
		return values[0];
	}

	@SuppressWarnings("rawtypes")
	@Override
	public Map getParameterMap() {
		return parameters;
	}

	@SuppressWarnings("rawtypes")
	@Override
	public Enumeration getParameterNames() {
		return Iterators.asEnumeration(parameters.keySet().iterator());
	}

	@Override
	public String[] getParameterValues(String name) {
		return parameters.get(name);
	}

	@Override
	public String getMethod() {
		return method;
	}

	// All path-flavoured accessors report the child request's path.

	@Override
	public String getPathInfo() {
		return path;
	}

	@Override
	public String getPathTranslated() {
		return path;
	}

	@Override
	public String getQueryString() {
		return query;
	}

	@Override
	public String getServletPath() {
		return path;
	}

	@Override
	public String getRequestURI() {
		return path;
	}

}
+ */ +package uk.co.tfd.sm.http.batch; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.io.UnsupportedEncodingException; +import java.util.Dictionary; +import java.util.Hashtable; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpServletResponseWrapper; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ResponseWrapper extends HttpServletResponseWrapper { + private static final Logger LOGGER = LoggerFactory + .getLogger(ResponseWrapper.class); + ByteArrayOutputStream boas = new ByteArrayOutputStream(); + ServletOutputStream servletOutputStream = new ServletOutputStream() { + @Override + public void write(int b) throws IOException { + boas.write(b); + } + }; + + PrintWriter pw; + private OutputStreamWriter osw; + private String type; + private String charset; + private int status = 200; // Default is 200, this is also the statuscode if + // none get's + // set on the response. + private Dictionary headers; + private String statusMessage; + + public ResponseWrapper(HttpServletResponse wrappedResponse) { + super(wrappedResponse); + headers = new Hashtable(); + try { + osw = new OutputStreamWriter(boas, "UTF-8"); + } catch (UnsupportedEncodingException e) { + LOGGER.debug(e.getMessage(), e); + } + pw = new PrintWriter(osw); + } + + @Override + public String getCharacterEncoding() { + return charset; + } + + @Override + public String getContentType() { + return type; + } + + /** + * {@inheritDoc} + * + * @see javax.servlet.ServletResponseWrapper#flushBuffer() + */ + @Override + public void flushBuffer() throws IOException { + } + + /** + * {@inheritDoc} + * + * @see javax.servlet.ServletResponseWrapper#isCommitted() + */ + @Override + public boolean isCommitted() { + // We always return false, so we can keep on outputting. 
+ return false; + } + + /** + * {@inheritDoc} + * + * @see javax.servlet.ServletResponseWrapper#getOutputStream() + */ + @Override + public ServletOutputStream getOutputStream() throws IOException { + return servletOutputStream; + } + + /** + * {@inheritDoc} + * + * @see javax.servlet.ServletResponseWrapper#getWriter() + */ + @Override + public PrintWriter getWriter() throws IOException { + return pw; + } + + @Override + public void setCharacterEncoding(String charset) { + this.charset = charset; + } + + @Override + public void setContentType(String type) { + this.type = type; + headers.put("Content-Type", type); + } + + @Override + public void setContentLength(int len) { + headers.put("Content-Length", Integer.toString(len)); + } + + @Override + public void reset() { + } + + @Override + public void resetBuffer() { + } + + // + // Status + // + + @Override + public void setStatus(int sc) { + this.status = sc; + } + + @Override + public void setStatus(int sc, String sm) { + this.status = sc; + this.statusMessage = sm; + } + + @Override + public void sendError(int sc) throws IOException { + this.status = sc; + } + + @Override + public void sendError(int sc, String msg) throws IOException { + this.status = sc; + this.statusMessage = msg; + } + + // + // Headers + // + + @Override + public void setHeader(String name, String value) { + headers.put(name, value); + } + + @Override + public void addHeader(String name, String value) { + headers.put(name, value); + } + + @Override + public void addIntHeader(String name, int value) { + headers.put(name, String.valueOf(value)); + } + + @Override + public void addDateHeader(String name, long date) { + headers.put(name, String.valueOf(date)); + } + + @Override + public void setDateHeader(String name, long date) { + headers.put(name, String.valueOf(date)); + } + + /** + * @return The headers returned by the underlying response. 
package uk.co.tfd.sm.http.content;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.Map;
import java.util.Map.Entry;

import javax.servlet.Servlet;
import javax.servlet.ServletException;

import org.apache.commons.lang.StringUtils;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Property;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.http.NamespaceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

/**
 * OSGi component that reads a list of "alias = path" mappings from its
 * configuration (falling back to {@link #DEFAULT_MAPPINGS}) and registers one
 * {@link StaticContentServlet} per mapping to serve static files. Mime types
 * are loaded from the classpath resources "mime.types" and "core_mime.types".
 */
@Component(immediate = true, metatype = true)
public class StaticContentServiceFactory {

	/** Fallback mappings used when no configuration is supplied. */
	private static final String[] DEFAULT_MAPPINGS = new String[] {
			"/devwidgets = static/ui/devwidgets",
			"/dev = static/ui/dev",
			"/403 = static/ui/dev/403.html",
			"/404 = static/ui/dev/404.html",
			"/500 = static/ui/dev/500.html",
			"/acknowledgements = static/ui/dev/acknowledgements.html",
			"/categories = static/ui/dev/allcategories.html",
			"/category = static/ui/dev/category.html",
			"/content = static/ui/dev/content_profile.html",
			"/register = static/ui/dev/create_new_account.html",
			"/create.html = static/ui/dev/createnew.html",
			"/create = static/ui/dev/createnew.html",
			"/favicon.ico = static/ui/dev/favicon.ico",
			"/index.html = static/ui/dev/index.html",
			"/index = static/ui/dev/index.html",
			"/logout = static/ui/dev/logout.html",
			"/me.html = static/ui/dev/me.html",
			"/me = static/ui/dev/me.html",
			"/search = static/ui/dev/search.html",
			"/search/sakai2 = static/ui/dev/search_sakai2.html",
			"/var = static/var",
			"/system/me = static/me.json",
			"/tags = static/tags"

	};

	@Property(value = {
			"/devwidgets = static/ui/devwidgets",
			"/dev = static/ui/dev",
			"/403 = static/ui/dev/403.html",
			"/404 = static/ui/dev/404.html",
			"/500 = static/ui/dev/500.html",
			"/acknowledgements = static/ui/dev/acknowledgements.html",
			"/categories = static/ui/dev/allcategories.html",
			"/category = static/ui/dev/category.html",
			"/content = static/ui/dev/content_profile.html",
			"/register = static/ui/dev/create_new_account.html",
			"/create.html = static/ui/dev/createnew.html",
			"/create = static/ui/dev/createnew.html",
			"/favicon.ico = static/ui/dev/favicon.ico",
			"/index.html = static/ui/dev/index.html",
			"/index = static/ui/dev/index.html",
			"/logout = static/ui/dev/logout.html",
			"/me.html = static/ui/dev/me.html",
			"/me = static/ui/dev/me.html",
			"/search = static/ui/dev/search.html",
			"/search/sakai2 = static/ui/dev/search_sakai2.html",
			"/var = static/var",
			"/system/me = static/me.json",
			"/tags = static/tags" })
	private static final String MAPPINGS = "mappings";

	// Fixed: the logger was previously registered under
	// StaticContentServlet.class, hiding this factory's messages.
	private static final Logger LOGGER = LoggerFactory
			.getLogger(StaticContentServiceFactory.class);

	/** Extension (without the dot) to mime type. */
	private Map<String, String> mimeTypes;

	/** Registered servlets and their OSGi registrations, for deactivation. */
	private Map<StaticContentServlet, ServiceRegistration> servlets = Maps
			.newHashMap();

	/**
	 * Reads the mappings property, loads mime types, and registers one content
	 * servlet per mapping.
	 */
	@Activate
	public void activate(ComponentContext componentContext)
			throws NamespaceException, IOException, ServletException {
		BundleContext bundleContext = componentContext.getBundleContext();
		@SuppressWarnings("unchecked")
		Dictionary<String, Object> properties = componentContext
				.getProperties();
		String[] mappings = toStringArray(properties.get(MAPPINGS),
				DEFAULT_MAPPINGS);
		Map<String, String> mt = Maps.newHashMap();
		loadMimeTypes(mt, "mime.types");
		loadMimeTypes(mt, "core_mime.types");

		mimeTypes = ImmutableMap.copyOf(mt);
		servlets.clear();
		if (mappings != null && mappings.length > 0) {
			for (String location : mappings) {
				String[] mapping = StringUtils.split(location, "=", 3);
				// BUG FIX: a mapping with no "=" previously caused an
				// ArrayIndexOutOfBoundsException.
				if (mapping == null || mapping.length < 2) {
					LOGGER.warn("Ignoring malformed mapping {} ", location);
					continue;
				}
				String alias = mapping[0].trim();
				String path = mapping[1].trim();
				StaticContentServlet contentServlet = new StaticContentServlet(
						alias, path, mimeTypes);

				Dictionary<String, Object> props = new Hashtable<String, Object>();
				props.put("alias", alias);
				servlets.put(contentServlet, bundleContext.registerService(
						Servlet.class.getName(), contentServlet, props));
				LOGGER.debug("Registering {} as {} {} ", new Object[] {
						contentServlet, alias, path });
			}
		}
	}

	/** Unregisters every servlet registered by {@link #activate}. */
	@Deactivate
	public void deactivate(ComponentContext componentContext) {
		for (Entry<StaticContentServlet, ServiceRegistration> e : servlets
				.entrySet()) {
			try {
				e.getValue().unregister();
			} catch (Exception ex) {
				LOGGER.debug(ex.getMessage(), ex);
			}
		}
		// Drop the registrations so a re-activation starts clean.
		servlets.clear();
	}

	/**
	 * Loads a classpath mime.types resource into mt. Each line is
	 * "mimetype ext ext ..."; '#' starts a comment.
	 */
	private void loadMimeTypes(Map<String, String> mt, String mimeTypes)
			throws IOException {
		InputStream stream = getClass().getResourceAsStream(mimeTypes);
		// BUG FIX: a missing resource previously caused a
		// NullPointerException inside the reader.
		if (stream == null) {
			LOGGER.warn("Mime type resource {} not found, ignored ", mimeTypes);
			return;
		}
		BufferedReader in = new BufferedReader(new InputStreamReader(stream,
				"UTF-8"));
		try {
			for (String s = in.readLine(); s != null; s = in.readLine()) {
				String[] l = new String[] { s };
				int c = s.indexOf('#');
				if (c == 0) {
					continue;
				} else if (c > 0) {
					// Strip the trailing comment.
					l = StringUtils.split(s, "#");
				}

				String[] p = StringUtils.split(l[0], " ");
				if (p != null && p.length > 1) {
					// First token is the mime type, the rest are extensions.
					for (int i = 1; i < p.length; i++) {
						mt.put(p[i], p[0]);
					}
				}
			}
		} finally {
			// BUG FIX: the reader was previously leaked when a read threw.
			in.close();
		}
	}

	/**
	 * Coerces a configuration value to a String[], splitting a scalar on ','.
	 */
	private String[] toStringArray(Object object, String[] defaultValue) {
		if (object == null) {
			return defaultValue;
		}
		if (object instanceof String[]) {
			return (String[]) object;
		}
		return StringUtils.split(String.valueOf(object), ",");
	}

}
package uk.co.tfd.sm.http.content;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.Map;

import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Serves static files below a base directory. The servlet is registered under
 * an alias; the alias prefix is stripped from the request URI and the
 * remainder resolved against the base directory. Requests escaping the base
 * directory are rejected. If the client accepts gzip and a sibling
 * "&lt;file&gt;.gz" exists, that is served with Content-Encoding: gzip.
 */
public class StaticContentServlet extends HttpServlet {

	private static final long serialVersionUID = 3447511065435638313L;
	/** Base directory all served files must live under. */
	private File basePathFile;
	/** Extension (without the dot) to mime type. */
	private Map<String, String> mimeTypes;
	/** Alias prefix this servlet is registered under. */
	private String alias;
	private String baseAbsolutePath;
	// NOTE(review): never assigned, so the verbose debug error messages are
	// always suppressed — confirm whether this should be configurable.
	private boolean debugAllowed;
	/** Per-thread reusable copy buffer, avoiding a 100KB allocation per request. */
	private ThreadLocal<ByteBuffer> copyBuffer = new ThreadLocal<ByteBuffer>() {
		@Override
		protected ByteBuffer initialValue() {
			return ByteBuffer.allocate(100 * 1024);
		}
	};

	public StaticContentServlet(String alias, String path,
			Map<String, String> mimeTypes) {
		this.basePathFile = new File(path);
		this.baseAbsolutePath = basePathFile.getAbsolutePath();
		this.mimeTypes = mimeTypes;
		this.alias = alias;
	}

	@Override
	public void init(ServletConfig config) throws ServletException {
		super.init(config);
	}

	@Override
	protected void doGet(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		String path = request.getRequestURI().substring(alias.length());
		// Reject obvious traversal attempts before touching the filesystem.
		if (path.contains("..")) {
			if (debugAllowed
					&& Boolean.parseBoolean(request.getParameter("debug"))) {
				response.sendError(
						HttpServletResponse.SC_BAD_REQUEST,
						"Alias:[" + alias + "] Request:["
								+ request.getRequestURI() + "]");
			} else {
				response.sendError(HttpServletResponse.SC_BAD_REQUEST);
			}
			return;
		}

		File f = new File(basePathFile, path);
		String n = f.getName();
		String mimeType = getMimeType(n);
		if (mimeType != null) {
			response.setContentType(mimeType);
		}
		String accept = request.getHeader("Accept-Encoding");
		boolean acceptsGz = (accept != null && accept.indexOf("gzip") >= 0);
		String abs = f.getAbsolutePath();
		// BUG FIX: a bare startsWith(baseAbsolutePath) also accepted sibling
		// directories sharing the base as a string prefix (e.g. "<base>x");
		// require the base itself or a path below it.
		if (!abs.equals(baseAbsolutePath)
				&& !abs.startsWith(baseAbsolutePath + File.separator)) {
			if (debugAllowed
					&& Boolean.parseBoolean(request.getParameter("debug"))) {
				response.sendError(HttpServletResponse.SC_FORBIDDEN, "Alias:["
						+ alias + "] Request:[" + request.getRequestURI()
						+ "] File:[" + abs + "]");
			} else {
				response.sendError(HttpServletResponse.SC_FORBIDDEN);
			}
			return;
		}
		if (f.exists() && f.isFile()) {
			File gzfile = new File(f.getAbsolutePath() + ".gz");
			// BUG FIX: the second check previously re-tested f.isFile();
			// it must test the gz sibling.
			if (acceptsGz && gzfile.isFile()) {
				response.setHeader("Content-Encoding", "gzip");
				streamFile(gzfile, response);
			} else {
				streamFile(f, response);
			}
		} else {
			if (debugAllowed
					&& Boolean.parseBoolean(request.getParameter("debug"))) {
				response.sendError(HttpServletResponse.SC_NOT_FOUND, "Alias:["
						+ alias + "] Request:[" + request.getRequestURI()
						+ "] File:[" + abs + "]");
			} else {
				response.sendError(HttpServletResponse.SC_NOT_FOUND);
			}
		}
	}

	/** Copies one file to the response body, always closing the input stream. */
	private void streamFile(File file, HttpServletResponse response)
			throws IOException {
		FileInputStream in = new FileInputStream(file);
		try {
			copy(in, response.getOutputStream());
		} finally {
			// BUG FIX: the stream was previously leaked when copy() threw.
			in.close();
		}
	}

	/** Pumps the file channel into the response via the per-thread buffer. */
	private void copy(FileInputStream in, ServletOutputStream outputStream)
			throws IOException {
		ByteBuffer bb = copyBuffer.get();
		FileChannel inc = in.getChannel();
		bb.clear();
		while (inc.read(bb) >= 0) {
			if (bb.position() > 0) {
				outputStream.write(bb.array(), 0, bb.position());
				bb.clear();
			} else {
				// A zero-byte read; back off briefly rather than spin.
				Thread.yield();
			}
		}
	}

	/**
	 * @param fileName
	 *            the file name to inspect.
	 * @return the mime type mapped to the file's extension, or null when
	 *         unknown.
	 */
	public String getMimeType(String fileName) {
		int i = fileName.lastIndexOf('.');
		String m = null;
		if (i > 0) {
			String ext = fileName.substring(i + 1);
			if (ext.endsWith("/")) {
				ext = ext.substring(0, ext.length() - 1);
			}
			m = mimeTypes.get(ext);
		}
		return m;
	}

}
+ "] File:[" + f.getAbsolutePath() + "]"); + } else { + response.sendError(HttpServletResponse.SC_NOT_FOUND); + } + } + } + + private void copy(FileInputStream in, ServletOutputStream outputStream) throws IOException { + ByteBuffer bb = copyBuffer.get(); + FileChannel inc = in.getChannel(); + bb.rewind(); + while(inc.read(bb) >= 0 ) { + if ( bb.position() > 0 ) { + outputStream.write(bb.array(), 0, bb.position()); + bb.rewind(); + } else { + Thread.yield(); + } + } + } + + public String getMimeType(String fileName) { + int i = fileName.lastIndexOf('.'); + String m = null; + if (i > 0) { + String ext = fileName.substring(i + 1); + if (ext.endsWith("/")) { + ext = ext.substring(0, ext.length() - 1); + } + m = mimeTypes.get(ext); + } + return m; + } + +} diff --git a/extensions/http/src/main/resources/META-INF/LICENSE b/extensions/http/src/main/resources/META-INF/LICENSE new file mode 100644 index 00000000..75b52484 --- /dev/null +++ b/extensions/http/src/main/resources/META-INF/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/extensions/http/src/main/resources/META-INF/NOTICE b/extensions/http/src/main/resources/META-INF/NOTICE new file mode 100644 index 00000000..2d587d91 --- /dev/null +++ b/extensions/http/src/main/resources/META-INF/NOTICE @@ -0,0 +1,14 @@ +Sakai Nakamura +Copyright 2009 The Sakai Foundation + +This product includes software developed at +The Sakai Foundation (http://www.sakaiproject.org/). + +----------------------------------------------------------- + +This product includes software (Apache Sling, Apache Felix, Apache Shindig and many other Apache products) +The Apache Software Foundation (http://www.apache.org/). + +Binary distributions of this product contain jars developed and licensed by other third parties, identified by the +LICENSE and NOTICE files included within each jar under the META-INF directory. + diff --git a/extensions/http/src/main/resources/uk/co/tfd/sm/http/content/core_mime.types b/extensions/http/src/main/resources/uk/co/tfd/sm/http/content/core_mime.types new file mode 100644 index 00000000..b74c703e --- /dev/null +++ b/extensions/http/src/main/resources/uk/co/tfd/sm/http/content/core_mime.types @@ -0,0 +1,1273 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#
+# This file is copied from
+#
+# http://svn.apache.org/repos/asf/httpd/httpd/tags/2.2.14/docs/conf/mime.types
+#
+#
+# NOTE: Please do not edit this file other than syncing its content
+# with the mime.types file from the Apache httpd project.
+# To add SLING specific default mappings rather amend the
+# mime.types file.
+#
+#
+# See SLING-964, SLING-973 for more information
+#
+
+
+# This is a comment. I love comments.
+
+# This file maps Internet media types to unique file extension(s).
+# Although created for httpd, this file is used by many software systems
+# and has been placed in the public domain for unlimited redistribution.
+#
+# The table below contains both registered and (common) unregistered types.
+# A type that has no unique extension can be ignored -- they are listed
+# here to guide configurations toward known types and to make it easier to
+# identify "new" types. File extensions are also commonly used to indicate
+# content languages and encodings, so choose them carefully.
+#
+# Internet media types should be registered as described in RFC 4288.
+# The registry is at <http://www.iana.org/assignments/media-types/>. 
+# +# MIME type Extensions +application/activemessage +application/andrew-inset ez +application/applefile +application/applixware aw +application/atom+xml atom +application/atomcat+xml atomcat +application/atomicmail +application/atomsvc+xml atomsvc +application/auth-policy+xml +application/batch-smtp +application/beep+xml +application/cals-1840 +application/ccxml+xml ccxml +application/cea-2018+xml +application/cellml+xml +application/cnrp+xml +application/commonground +application/conference-info+xml +application/cpl+xml +application/csta+xml +application/cstadata+xml +application/cu-seeme cu +application/cybercash +application/davmount+xml davmount +application/dca-rft +application/dec-dx +application/dialog-info+xml +application/dicom +application/dns +application/dvcs +application/ecmascript ecma +application/edi-consent +application/edi-x12 +application/edifact +application/emma+xml emma +application/epp+xml +application/epub+zip epub +application/eshop +application/example +application/fastinfoset +application/fastsoap +application/fits +application/font-tdpfr pfr +application/h224 +application/http +application/hyperstudio stk +application/ibe-key-request+xml +application/ibe-pkg-reply+xml +application/ibe-pp-data +application/iges +application/im-iscomposing+xml +application/index +application/index.cmd +application/index.obj +application/index.response +application/index.vnd +application/iotp +application/ipp +application/isup +application/java-archive jar +application/java-serialized-object ser +application/java-vm class +application/javascript js +application/json json +application/kpml-request+xml +application/kpml-response+xml +application/lost+xml lostxml +application/mac-binhex40 hqx +application/mac-compactpro cpt +application/macwriteii +application/marc mrc +application/mathematica ma nb mb +application/mathml+xml mathml +application/mbms-associated-procedure-description+xml +application/mbms-deregister+xml +application/mbms-envelope+xml 
+application/mbms-msk+xml +application/mbms-msk-response+xml +application/mbms-protection-description+xml +application/mbms-reception-report+xml +application/mbms-register+xml +application/mbms-register-response+xml +application/mbms-user-service-description+xml +application/mbox mbox +application/media_control+xml +application/mediaservercontrol+xml mscml +application/mikey +application/moss-keys +application/moss-signature +application/mosskey-data +application/mosskey-request +application/mp4 mp4s +application/mpeg4-generic +application/mpeg4-iod +application/mpeg4-iod-xmt +application/msword doc dot +application/mxf mxf +application/nasdata +application/news-checkgroups +application/news-groupinfo +application/news-transmission +application/nss +application/ocsp-request +application/ocsp-response +application/octet-stream bin dms lha lrf lzh so iso dmg dist distz pkg bpk dump elc deploy +application/oda oda +application/oebps-package+xml opf +application/ogg ogx +application/onenote onetoc onetoc2 onetmp onepkg +application/parityfec +application/patch-ops-error+xml xer +application/pdf pdf +application/pgp-encrypted pgp +application/pgp-keys +application/pgp-signature asc sig +application/pics-rules prf +application/pidf+xml +application/pidf-diff+xml +application/pkcs10 p10 +application/pkcs7-mime p7m p7c +application/pkcs7-signature p7s +application/pkix-cert cer +application/pkix-crl crl +application/pkix-pkipath pkipath +application/pkixcmp pki +application/pls+xml pls +application/poc-settings+xml +application/postscript ai eps ps +application/prs.alvestrand.titrax-sheet +application/prs.cww cww +application/prs.nprend +application/prs.plucker +application/qsig +application/rdf+xml rdf +application/reginfo+xml rif +application/relax-ng-compact-syntax rnc +application/remote-printing +application/resource-lists+xml rl +application/resource-lists-diff+xml rld +application/riscos +application/rlmi+xml +application/rls-services+xml rs +application/rsd+xml rsd 
+application/rss+xml rss +application/rtf rtf +application/rtx +application/samlassertion+xml +application/samlmetadata+xml +application/sbml+xml sbml +application/scvp-cv-request scq +application/scvp-cv-response scs +application/scvp-vp-request spq +application/scvp-vp-response spp +application/sdp sdp +application/set-payment +application/set-payment-initiation setpay +application/set-registration +application/set-registration-initiation setreg +application/sgml +application/sgml-open-catalog +application/shf+xml shf +application/sieve +application/simple-filter+xml +application/simple-message-summary +application/simplesymbolcontainer +application/slate +application/smil +application/smil+xml smi smil +application/soap+fastinfoset +application/soap+xml +application/sparql-query rq +application/sparql-results+xml srx +application/spirits-event+xml +application/srgs gram +application/srgs+xml grxml +application/ssml+xml ssml +application/timestamp-query +application/timestamp-reply +application/tve-trigger +application/ulpfec +application/vemmi +application/vividence.scriptfile +application/vnd.3gpp.bsf+xml +application/vnd.3gpp.pic-bw-large plb +application/vnd.3gpp.pic-bw-small psb +application/vnd.3gpp.pic-bw-var pvb +application/vnd.3gpp.sms +application/vnd.3gpp2.bcmcsinfo+xml +application/vnd.3gpp2.sms +application/vnd.3gpp2.tcap tcap +application/vnd.3m.post-it-notes pwn +application/vnd.accpac.simply.aso aso +application/vnd.accpac.simply.imp imp +application/vnd.acucobol acu +application/vnd.acucorp atc acutc +application/vnd.adobe.air-application-installer-package+zip air +application/vnd.adobe.xdp+xml xdp +application/vnd.adobe.xfdf xfdf +application/vnd.aether.imp +application/vnd.airzip.filesecure.azf azf +application/vnd.airzip.filesecure.azs azs +application/vnd.amazon.ebook azw +application/vnd.americandynamics.acc acc +application/vnd.amiga.ami ami +application/vnd.android.package-archive apk 
+application/vnd.anser-web-certificate-issue-initiation cii +application/vnd.anser-web-funds-transfer-initiation fti +application/vnd.antix.game-component atx +application/vnd.apple.installer+xml mpkg +application/vnd.arastra.swi swi +application/vnd.audiograph aep +application/vnd.autopackage +application/vnd.avistar+xml +application/vnd.blueice.multipass mpm +application/vnd.bluetooth.ep.oob +application/vnd.bmi bmi +application/vnd.businessobjects rep +application/vnd.cab-jscript +application/vnd.canon-cpdl +application/vnd.canon-lips +application/vnd.cendio.thinlinc.clientconf +application/vnd.chemdraw+xml cdxml +application/vnd.chipnuts.karaoke-mmd mmd +application/vnd.cinderella cdy +application/vnd.cirpack.isdn-ext +application/vnd.claymore cla +application/vnd.clonk.c4group c4g c4d c4f c4p c4u +application/vnd.commerce-battelle +application/vnd.commonspace csp +application/vnd.contact.cmsg cdbcmsg +application/vnd.cosmocaller cmc +application/vnd.crick.clicker clkx +application/vnd.crick.clicker.keyboard clkk +application/vnd.crick.clicker.palette clkp +application/vnd.crick.clicker.template clkt +application/vnd.crick.clicker.wordbank clkw +application/vnd.criticaltools.wbs+xml wbs +application/vnd.ctc-posml pml +application/vnd.ctct.ws+xml +application/vnd.cups-pdf +application/vnd.cups-postscript +application/vnd.cups-ppd ppd +application/vnd.cups-raster +application/vnd.cups-raw +application/vnd.curl.car car +application/vnd.curl.pcurl pcurl +application/vnd.cybank +application/vnd.data-vision.rdz rdz +application/vnd.denovo.fcselayout-link fe_launch +application/vnd.dir-bi.plate-dl-nosuffix +application/vnd.dna dna +application/vnd.dolby.mlp mlp +application/vnd.dolby.mobile.1 +application/vnd.dolby.mobile.2 +application/vnd.dpgraph dpg +application/vnd.dreamfactory dfac +application/vnd.dvb.esgcontainer +application/vnd.dvb.ipdcdftnotifaccess +application/vnd.dvb.ipdcesgaccess +application/vnd.dvb.ipdcroaming +application/vnd.dvb.iptv.alfec-base 
+application/vnd.dvb.iptv.alfec-enhancement +application/vnd.dvb.notif-aggregate-root+xml +application/vnd.dvb.notif-container+xml +application/vnd.dvb.notif-generic+xml +application/vnd.dvb.notif-ia-msglist+xml +application/vnd.dvb.notif-ia-registration-request+xml +application/vnd.dvb.notif-ia-registration-response+xml +application/vnd.dvb.notif-init+xml +application/vnd.dxr +application/vnd.dynageo geo +application/vnd.ecdis-update +application/vnd.ecowin.chart mag +application/vnd.ecowin.filerequest +application/vnd.ecowin.fileupdate +application/vnd.ecowin.series +application/vnd.ecowin.seriesrequest +application/vnd.ecowin.seriesupdate +application/vnd.emclient.accessrequest+xml +application/vnd.enliven nml +application/vnd.epson.esf esf +application/vnd.epson.msf msf +application/vnd.epson.quickanime qam +application/vnd.epson.salt slt +application/vnd.epson.ssf ssf +application/vnd.ericsson.quickcall +application/vnd.eszigno3+xml es3 et3 +application/vnd.etsi.aoc+xml +application/vnd.etsi.cug+xml +application/vnd.etsi.iptvcommand+xml +application/vnd.etsi.iptvdiscovery+xml +application/vnd.etsi.iptvprofile+xml +application/vnd.etsi.iptvsad-bc+xml +application/vnd.etsi.iptvsad-cod+xml +application/vnd.etsi.iptvsad-npvr+xml +application/vnd.etsi.iptvueprofile+xml +application/vnd.etsi.mcid+xml +application/vnd.etsi.sci+xml +application/vnd.etsi.simservs+xml +application/vnd.eudora.data +application/vnd.ezpix-album ez2 +application/vnd.ezpix-package ez3 +application/vnd.f-secure.mobile +application/vnd.fdf fdf +application/vnd.fdsn.mseed mseed +application/vnd.fdsn.seed seed dataless +application/vnd.ffsns +application/vnd.fints +application/vnd.flographit gph +application/vnd.fluxtime.clip ftc +application/vnd.font-fontforge-sfd +application/vnd.framemaker fm frame maker book +application/vnd.frogans.fnc fnc +application/vnd.frogans.ltf ltf +application/vnd.fsc.weblaunch fsc +application/vnd.fujitsu.oasys oas +application/vnd.fujitsu.oasys2 oa2 
+application/vnd.fujitsu.oasys3 oa3 +application/vnd.fujitsu.oasysgp fg5 +application/vnd.fujitsu.oasysprs bh2 +application/vnd.fujixerox.art-ex +application/vnd.fujixerox.art4 +application/vnd.fujixerox.hbpl +application/vnd.fujixerox.ddd ddd +application/vnd.fujixerox.docuworks xdw +application/vnd.fujixerox.docuworks.binder xbd +application/vnd.fut-misnet +application/vnd.fuzzysheet fzs +application/vnd.genomatix.tuxedo txd +application/vnd.geogebra.file ggb +application/vnd.geogebra.tool ggt +application/vnd.geometry-explorer gex gre +application/vnd.gmx gmx +application/vnd.google-earth.kml+xml kml +application/vnd.google-earth.kmz kmz +application/vnd.grafeq gqf gqs +application/vnd.gridmp +application/vnd.groove-account gac +application/vnd.groove-help ghf +application/vnd.groove-identity-message gim +application/vnd.groove-injector grv +application/vnd.groove-tool-message gtm +application/vnd.groove-tool-template tpl +application/vnd.groove-vcard vcg +application/vnd.handheld-entertainment+xml zmm +application/vnd.hbci hbci +application/vnd.hcl-bireports +application/vnd.hhe.lesson-player les +application/vnd.hp-hpgl hpgl +application/vnd.hp-hpid hpid +application/vnd.hp-hps hps +application/vnd.hp-jlyt jlt +application/vnd.hp-pcl pcl +application/vnd.hp-pclxl pclxl +application/vnd.httphone +application/vnd.hydrostatix.sof-data sfd-hdstx +application/vnd.hzn-3d-crossword x3d +application/vnd.ibm.afplinedata +application/vnd.ibm.electronic-media +application/vnd.ibm.minipay mpy +application/vnd.ibm.modcap afp listafp list3820 +application/vnd.ibm.rights-management irm +application/vnd.ibm.secure-container sc +application/vnd.iccprofile icc icm +application/vnd.igloader igl +application/vnd.immervision-ivp ivp +application/vnd.immervision-ivu ivu +application/vnd.informedcontrol.rms+xml +application/vnd.informix-visionary +application/vnd.intercon.formnet xpw xpx +application/vnd.intertrust.digibox +application/vnd.intertrust.nncp +application/vnd.intu.qbo 
qbo +application/vnd.intu.qfx qfx +application/vnd.iptc.g2.conceptitem+xml +application/vnd.iptc.g2.knowledgeitem+xml +application/vnd.iptc.g2.newsitem+xml +application/vnd.iptc.g2.packageitem+xml +application/vnd.ipunplugged.rcprofile rcprofile +application/vnd.irepository.package+xml irp +application/vnd.is-xpr xpr +application/vnd.jam jam +application/vnd.japannet-directory-service +application/vnd.japannet-jpnstore-wakeup +application/vnd.japannet-payment-wakeup +application/vnd.japannet-registration +application/vnd.japannet-registration-wakeup +application/vnd.japannet-setstore-wakeup +application/vnd.japannet-verification +application/vnd.japannet-verification-wakeup +application/vnd.jcp.javame.midlet-rms rms +application/vnd.jisp jisp +application/vnd.joost.joda-archive joda +application/vnd.kahootz ktz ktr +application/vnd.kde.karbon karbon +application/vnd.kde.kchart chrt +application/vnd.kde.kformula kfo +application/vnd.kde.kivio flw +application/vnd.kde.kontour kon +application/vnd.kde.kpresenter kpr kpt +application/vnd.kde.kspread ksp +application/vnd.kde.kword kwd kwt +application/vnd.kenameaapp htke +application/vnd.kidspiration kia +application/vnd.kinar kne knp +application/vnd.koan skp skd skt skm +application/vnd.kodak-descriptor sse +application/vnd.liberty-request+xml +application/vnd.llamagraphics.life-balance.desktop lbd +application/vnd.llamagraphics.life-balance.exchange+xml lbe +application/vnd.lotus-1-2-3 123 +application/vnd.lotus-approach apr +application/vnd.lotus-freelance pre +application/vnd.lotus-notes nsf +application/vnd.lotus-organizer org +application/vnd.lotus-screencam scm +application/vnd.lotus-wordpro lwp +application/vnd.macports.portpkg portpkg +application/vnd.marlin.drm.actiontoken+xml +application/vnd.marlin.drm.conftoken+xml +application/vnd.marlin.drm.license+xml +application/vnd.marlin.drm.mdcf +application/vnd.mcd mcd +application/vnd.medcalcdata mc1 +application/vnd.mediastation.cdkey cdkey 
+application/vnd.meridian-slingshot +application/vnd.mfer mwf +application/vnd.mfmp mfm +application/vnd.micrografx.flo flo +application/vnd.micrografx.igx igx +application/vnd.mif mif +application/vnd.minisoft-hp3000-save +application/vnd.mitsubishi.misty-guard.trustweb +application/vnd.mobius.daf daf +application/vnd.mobius.dis dis +application/vnd.mobius.mbk mbk +application/vnd.mobius.mqy mqy +application/vnd.mobius.msl msl +application/vnd.mobius.plc plc +application/vnd.mobius.txf txf +application/vnd.mophun.application mpn +application/vnd.mophun.certificate mpc +application/vnd.motorola.flexsuite +application/vnd.motorola.flexsuite.adsi +application/vnd.motorola.flexsuite.fis +application/vnd.motorola.flexsuite.gotap +application/vnd.motorola.flexsuite.kmr +application/vnd.motorola.flexsuite.ttc +application/vnd.motorola.flexsuite.wem +application/vnd.motorola.iprm +application/vnd.mozilla.xul+xml xul +application/vnd.ms-artgalry cil +application/vnd.ms-asf +application/vnd.ms-cab-compressed cab +application/vnd.ms-excel xls xlm xla xlc xlt xlw +application/vnd.ms-excel.addin.macroenabled.12 xlam +application/vnd.ms-excel.sheet.binary.macroenabled.12 xlsb +application/vnd.ms-excel.sheet.macroenabled.12 xlsm +application/vnd.ms-excel.template.macroenabled.12 xltm +application/vnd.ms-fontobject eot +application/vnd.ms-htmlhelp chm +application/vnd.ms-ims ims +application/vnd.ms-lrm lrm +application/vnd.ms-pki.seccat cat +application/vnd.ms-pki.stl stl +application/vnd.ms-playready.initiator+xml +application/vnd.ms-powerpoint ppt pps pot +application/vnd.ms-powerpoint.addin.macroenabled.12 ppam +application/vnd.ms-powerpoint.presentation.macroenabled.12 pptm +application/vnd.ms-powerpoint.slide.macroenabled.12 sldm +application/vnd.ms-powerpoint.slideshow.macroenabled.12 ppsm +application/vnd.ms-powerpoint.template.macroenabled.12 potm +application/vnd.ms-project mpp mpt +application/vnd.ms-tnef +application/vnd.ms-wmdrm.lic-chlg-req 
+application/vnd.ms-wmdrm.lic-resp +application/vnd.ms-wmdrm.meter-chlg-req +application/vnd.ms-wmdrm.meter-resp +application/vnd.ms-word.document.macroenabled.12 docm +application/vnd.ms-word.template.macroenabled.12 dotm +application/vnd.ms-works wps wks wcm wdb +application/vnd.ms-wpl wpl +application/vnd.ms-xpsdocument xps +application/vnd.mseq mseq +application/vnd.msign +application/vnd.multiad.creator +application/vnd.multiad.creator.cif +application/vnd.music-niff +application/vnd.musician mus +application/vnd.muvee.style msty +application/vnd.ncd.control +application/vnd.ncd.reference +application/vnd.nervana +application/vnd.netfpx +application/vnd.neurolanguage.nlu nlu +application/vnd.noblenet-directory nnd +application/vnd.noblenet-sealer nns +application/vnd.noblenet-web nnw +application/vnd.nokia.catalogs +application/vnd.nokia.conml+wbxml +application/vnd.nokia.conml+xml +application/vnd.nokia.isds-radio-presets +application/vnd.nokia.iptv.config+xml +application/vnd.nokia.landmark+wbxml +application/vnd.nokia.landmark+xml +application/vnd.nokia.landmarkcollection+xml +application/vnd.nokia.n-gage.ac+xml +application/vnd.nokia.n-gage.data ngdat +application/vnd.nokia.n-gage.symbian.install n-gage +application/vnd.nokia.ncd +application/vnd.nokia.pcd+wbxml +application/vnd.nokia.pcd+xml +application/vnd.nokia.radio-preset rpst +application/vnd.nokia.radio-presets rpss +application/vnd.novadigm.edm edm +application/vnd.novadigm.edx edx +application/vnd.novadigm.ext ext +application/vnd.oasis.opendocument.chart odc +application/vnd.oasis.opendocument.chart-template otc +application/vnd.oasis.opendocument.database odb +application/vnd.oasis.opendocument.formula odf +application/vnd.oasis.opendocument.formula-template odft +application/vnd.oasis.opendocument.graphics odg +application/vnd.oasis.opendocument.graphics-template otg +application/vnd.oasis.opendocument.image odi +application/vnd.oasis.opendocument.image-template oti 
+application/vnd.oasis.opendocument.presentation odp +application/vnd.oasis.opendocument.presentation-template otp +application/vnd.oasis.opendocument.spreadsheet ods +application/vnd.oasis.opendocument.spreadsheet-template ots +application/vnd.oasis.opendocument.text odt +application/vnd.oasis.opendocument.text-master otm +application/vnd.oasis.opendocument.text-template ott +application/vnd.oasis.opendocument.text-web oth +application/vnd.obn +application/vnd.olpc-sugar xo +application/vnd.oma-scws-config +application/vnd.oma-scws-http-request +application/vnd.oma-scws-http-response +application/vnd.oma.bcast.associated-procedure-parameter+xml +application/vnd.oma.bcast.drm-trigger+xml +application/vnd.oma.bcast.imd+xml +application/vnd.oma.bcast.ltkm +application/vnd.oma.bcast.notification+xml +application/vnd.oma.bcast.provisioningtrigger +application/vnd.oma.bcast.sgboot +application/vnd.oma.bcast.sgdd+xml +application/vnd.oma.bcast.sgdu +application/vnd.oma.bcast.simple-symbol-container +application/vnd.oma.bcast.smartcard-trigger+xml +application/vnd.oma.bcast.sprov+xml +application/vnd.oma.bcast.stkm +application/vnd.oma.dcd +application/vnd.oma.dcdc +application/vnd.oma.dd2+xml dd2 +application/vnd.oma.drm.risd+xml +application/vnd.oma.group-usage-list+xml +application/vnd.oma.poc.detailed-progress-report+xml +application/vnd.oma.poc.final-report+xml +application/vnd.oma.poc.groups+xml +application/vnd.oma.poc.invocation-descriptor+xml +application/vnd.oma.poc.optimized-progress-report+xml +application/vnd.oma.xcap-directory+xml +application/vnd.omads-email+xml +application/vnd.omads-file+xml +application/vnd.omads-folder+xml +application/vnd.omaloc-supl-init +application/vnd.openofficeorg.extension oxt +application/vnd.openxmlformats-officedocument.presentationml.presentation pptx +application/vnd.openxmlformats-officedocument.presentationml.slide sldx +application/vnd.openxmlformats-officedocument.presentationml.slideshow ppsx 
+application/vnd.openxmlformats-officedocument.presentationml.template potx +application/vnd.openxmlformats-officedocument.spreadsheetml.sheet xlsx +application/vnd.openxmlformats-officedocument.spreadsheetml.template xltx +application/vnd.openxmlformats-officedocument.wordprocessingml.document docx +application/vnd.openxmlformats-officedocument.wordprocessingml.template dotx +application/vnd.osa.netdeploy +application/vnd.osgi.bundle +application/vnd.osgi.dp dp +application/vnd.otps.ct-kip+xml +application/vnd.palm pdb pqa oprc +application/vnd.paos.xml +application/vnd.pg.format str +application/vnd.pg.osasli ei6 +application/vnd.piaccess.application-licence +application/vnd.picsel efif +application/vnd.poc.group-advertisement+xml +application/vnd.pocketlearn plf +application/vnd.powerbuilder6 pbd +application/vnd.powerbuilder6-s +application/vnd.powerbuilder7 +application/vnd.powerbuilder7-s +application/vnd.powerbuilder75 +application/vnd.powerbuilder75-s +application/vnd.preminet +application/vnd.previewsystems.box box +application/vnd.proteus.magazine mgz +application/vnd.publishare-delta-tree qps +application/vnd.pvi.ptid1 ptid +application/vnd.pwg-multiplexed +application/vnd.pwg-xhtml-print+xml +application/vnd.qualcomm.brew-app-res +application/vnd.quark.quarkxpress qxd qxt qwd qwt qxl qxb +application/vnd.rapid +application/vnd.recordare.musicxml mxl +application/vnd.recordare.musicxml+xml musicxml +application/vnd.renlearn.rlprint +application/vnd.rim.cod cod +application/vnd.rn-realmedia rm +application/vnd.route66.link66+xml link66 +application/vnd.ruckus.download +application/vnd.s3sms +application/vnd.sbm.cid +application/vnd.sbm.mid2 +application/vnd.scribus +application/vnd.sealed.3df +application/vnd.sealed.csf +application/vnd.sealed.doc +application/vnd.sealed.eml +application/vnd.sealed.mht +application/vnd.sealed.net +application/vnd.sealed.ppt +application/vnd.sealed.tiff +application/vnd.sealed.xls +application/vnd.sealedmedia.softseal.html 
+application/vnd.sealedmedia.softseal.pdf +application/vnd.seemail see +application/vnd.sema sema +application/vnd.semd semd +application/vnd.semf semf +application/vnd.shana.informed.formdata ifm +application/vnd.shana.informed.formtemplate itp +application/vnd.shana.informed.interchange iif +application/vnd.shana.informed.package ipk +application/vnd.simtech-mindmapper twd twds +application/vnd.smaf mmf +application/vnd.smart.teacher teacher +application/vnd.software602.filler.form+xml +application/vnd.software602.filler.form-xml-zip +application/vnd.solent.sdkm+xml sdkm sdkd +application/vnd.spotfire.dxp dxp +application/vnd.spotfire.sfs sfs +application/vnd.sss-cod +application/vnd.sss-dtf +application/vnd.sss-ntf +application/vnd.stardivision.calc sdc +application/vnd.stardivision.draw sda +application/vnd.stardivision.impress sdd +application/vnd.stardivision.math smf +application/vnd.stardivision.writer sdw +application/vnd.stardivision.writer vor +application/vnd.stardivision.writer-global sgl +application/vnd.street-stream +application/vnd.sun.xml.calc sxc +application/vnd.sun.xml.calc.template stc +application/vnd.sun.xml.draw sxd +application/vnd.sun.xml.draw.template std +application/vnd.sun.xml.impress sxi +application/vnd.sun.xml.impress.template sti +application/vnd.sun.xml.math sxm +application/vnd.sun.xml.writer sxw +application/vnd.sun.xml.writer.global sxg +application/vnd.sun.xml.writer.template stw +application/vnd.sun.wadl+xml +application/vnd.sus-calendar sus susp +application/vnd.svd svd +application/vnd.swiftview-ics +application/vnd.symbian.install sis sisx +application/vnd.syncml+xml xsm +application/vnd.syncml.dm+wbxml bdm +application/vnd.syncml.dm+xml xdm +application/vnd.syncml.dm.notification +application/vnd.syncml.ds.notification +application/vnd.tao.intent-module-archive tao +application/vnd.tmobile-livetv tmo +application/vnd.trid.tpt tpt +application/vnd.triscape.mxs mxs +application/vnd.trueapp tra +application/vnd.truedoc 
+application/vnd.ufdl ufd ufdl +application/vnd.uiq.theme utz +application/vnd.umajin umj +application/vnd.unity unityweb +application/vnd.uoml+xml uoml +application/vnd.uplanet.alert +application/vnd.uplanet.alert-wbxml +application/vnd.uplanet.bearer-choice +application/vnd.uplanet.bearer-choice-wbxml +application/vnd.uplanet.cacheop +application/vnd.uplanet.cacheop-wbxml +application/vnd.uplanet.channel +application/vnd.uplanet.channel-wbxml +application/vnd.uplanet.list +application/vnd.uplanet.list-wbxml +application/vnd.uplanet.listcmd +application/vnd.uplanet.listcmd-wbxml +application/vnd.uplanet.signal +application/vnd.vcx vcx +application/vnd.vd-study +application/vnd.vectorworks +application/vnd.vidsoft.vidconference +application/vnd.visio vsd vst vss vsw +application/vnd.visionary vis +application/vnd.vividence.scriptfile +application/vnd.vsf vsf +application/vnd.wap.sic +application/vnd.wap.slc +application/vnd.wap.wbxml wbxml +application/vnd.wap.wmlc wmlc +application/vnd.wap.wmlscriptc wmlsc +application/vnd.webturbo wtb +application/vnd.wfa.wsc +application/vnd.wmc +application/vnd.wmf.bootstrap +application/vnd.wordperfect wpd +application/vnd.wqd wqd +application/vnd.wrq-hp3000-labelled +application/vnd.wt.stf stf +application/vnd.wv.csp+wbxml +application/vnd.wv.csp+xml +application/vnd.wv.ssp+xml +application/vnd.xara xar +application/vnd.xfdl xfdl +application/vnd.xfdl.webform +application/vnd.xmi+xml +application/vnd.xmpie.cpkg +application/vnd.xmpie.dpkg +application/vnd.xmpie.plan +application/vnd.xmpie.ppkg +application/vnd.xmpie.xlim +application/vnd.yamaha.hv-dic hvd +application/vnd.yamaha.hv-script hvs +application/vnd.yamaha.hv-voice hvp +application/vnd.yamaha.openscoreformat osf +application/vnd.yamaha.openscoreformat.osfpvg+xml osfpvg +application/vnd.yamaha.smaf-audio saf +application/vnd.yamaha.smaf-phrase spf +application/vnd.yellowriver-custom-menu cmp +application/vnd.zul zir zirz +application/vnd.zzazz.deck+xml zaz 
+application/voicexml+xml vxml +application/watcherinfo+xml +application/whoispp-query +application/whoispp-response +application/winhlp hlp +application/wita +application/wordperfect5.1 +application/wsdl+xml wsdl +application/wspolicy+xml wspolicy +application/x-abiword abw +application/x-ace-compressed ace +application/x-authorware-bin aab x32 u32 vox +application/x-authorware-map aam +application/x-authorware-seg aas +application/x-bcpio bcpio +application/x-bittorrent torrent +application/x-bzip bz +application/x-bzip2 bz2 boz +application/x-cdlink vcd +application/x-chat chat +application/x-chess-pgn pgn +application/x-compress +application/x-cpio cpio +application/x-csh csh +application/x-debian-package deb udeb +application/x-director dir dcr dxr cst cct cxt w3d fgd swa +application/x-doom wad +application/x-dtbncx+xml ncx +application/x-dtbook+xml dtb +application/x-dtbresource+xml res +application/x-dvi dvi +application/x-font-bdf bdf +application/x-font-dos +application/x-font-framemaker +application/x-font-ghostscript gsf +application/x-font-libgrx +application/x-font-linux-psf psf +application/x-font-otf otf +application/x-font-pcf pcf +application/x-font-snf snf +application/x-font-speedo +application/x-font-sunos-news +application/x-font-ttf ttf ttc +application/x-font-type1 pfa pfb pfm afm +application/x-font-vfont +application/x-futuresplash spl +application/x-gnumeric gnumeric +application/x-gtar gtar +application/x-gzip +application/x-hdf hdf +application/x-java-jnlp-file jnlp +application/x-latex latex +application/x-mobipocket-ebook prc mobi +application/x-ms-application application +application/x-ms-wmd wmd +application/x-ms-wmz wmz +application/x-ms-xbap xbap +application/x-msaccess mdb +application/x-msbinder obd +application/x-mscardfile crd +application/x-msclip clp +application/x-msdownload exe dll com bat msi +application/x-msmediaview mvb m13 m14 +application/x-msmetafile wmf +application/x-msmoney mny +application/x-mspublisher pub 
+application/x-msschedule scd +application/x-msterminal trm +application/x-mswrite wri +application/x-netcdf nc cdf +application/x-pkcs12 p12 pfx +application/x-pkcs7-certificates p7b spc +application/x-pkcs7-certreqresp p7r +application/x-rar-compressed rar +application/x-sh sh +application/x-shar shar +application/x-shockwave-flash swf +application/x-silverlight-app xap +application/x-stuffit sit +application/x-stuffitx sitx +application/x-sv4cpio sv4cpio +application/x-sv4crc sv4crc +application/x-tar tar +application/x-tcl tcl +application/x-tex tex +application/x-tex-tfm tfm +application/x-texinfo texinfo texi +application/x-ustar ustar +application/x-wais-source src +application/x-x509-ca-cert der crt +application/x-xfig fig +application/x-xpinstall xpi +application/x400-bp +application/xcap-att+xml +application/xcap-caps+xml +application/xcap-el+xml +application/xcap-error+xml +application/xcap-ns+xml +application/xcon-conference-info-diff+xml +application/xcon-conference-info+xml +application/xenc+xml xenc +application/xhtml+xml xhtml xht +application/xhtml-voice+xml +application/xml xml xsl +application/xml-dtd dtd +application/xml-external-parsed-entity +application/xmpp+xml +application/xop+xml xop +application/xslt+xml xslt +application/xspf+xml xspf +application/xv+xml mxml xhvml xvml xvm +application/zip zip +audio/32kadpcm +audio/3gpp +audio/3gpp2 +audio/ac3 +audio/adpcm adp +audio/amr +audio/amr-wb +audio/amr-wb+ +audio/asc +audio/basic au snd +audio/bv16 +audio/bv32 +audio/clearmode +audio/cn +audio/dat12 +audio/dls +audio/dsr-es201108 +audio/dsr-es202050 +audio/dsr-es202211 +audio/dsr-es202212 +audio/dvi4 +audio/eac3 +audio/evrc +audio/evrc-qcp +audio/evrc0 +audio/evrc1 +audio/evrcb +audio/evrcb0 +audio/evrcb1 +audio/evrcwb +audio/evrcwb0 +audio/evrcwb1 +audio/example +audio/g719 +audio/g722 +audio/g7221 +audio/g723 +audio/g726-16 +audio/g726-24 +audio/g726-32 +audio/g726-40 +audio/g728 +audio/g729 +audio/g7291 +audio/g729d +audio/g729e +audio/gsm 
+audio/gsm-efr +audio/ilbc +audio/l16 +audio/l20 +audio/l24 +audio/l8 +audio/lpc +audio/midi mid midi kar rmi +audio/mobile-xmf +audio/mp4 mp4a +audio/mp4a-latm +audio/mpa +audio/mpa-robust +audio/mpeg mpga mp2 mp2a mp3 m2a m3a +audio/mpeg4-generic +audio/ogg oga ogg spx +audio/parityfec +audio/pcma +audio/pcma-wb +audio/pcmu-wb +audio/pcmu +audio/prs.sid +audio/qcelp +audio/red +audio/rtp-enc-aescm128 +audio/rtp-midi +audio/rtx +audio/smv +audio/smv0 +audio/smv-qcp +audio/sp-midi +audio/t140c +audio/t38 +audio/telephone-event +audio/tone +audio/ulpfec +audio/vdvi +audio/vmr-wb +audio/vnd.3gpp.iufp +audio/vnd.4sb +audio/vnd.audiokoz +audio/vnd.celp +audio/vnd.cisco.nse +audio/vnd.cmles.radio-events +audio/vnd.cns.anp1 +audio/vnd.cns.inf1 +audio/vnd.digital-winds eol +audio/vnd.dlna.adts +audio/vnd.dolby.heaac.1 +audio/vnd.dolby.heaac.2 +audio/vnd.dolby.mlp +audio/vnd.dolby.mps +audio/vnd.dolby.pl2 +audio/vnd.dolby.pl2x +audio/vnd.dolby.pl2z +audio/vnd.dts dts +audio/vnd.dts.hd dtshd +audio/vnd.everad.plj +audio/vnd.hns.audio +audio/vnd.lucent.voice lvp +audio/vnd.ms-playready.media.pya pya +audio/vnd.nokia.mobile-xmf +audio/vnd.nortel.vbk +audio/vnd.nuera.ecelp4800 ecelp4800 +audio/vnd.nuera.ecelp7470 ecelp7470 +audio/vnd.nuera.ecelp9600 ecelp9600 +audio/vnd.octel.sbc +audio/vnd.qcelp +audio/vnd.rhetorex.32kadpcm +audio/vnd.sealedmedia.softseal.mpeg +audio/vnd.vmx.cvsd +audio/vorbis +audio/vorbis-config +audio/x-aac aac +audio/x-aiff aif aiff aifc +audio/x-mpegurl m3u +audio/x-ms-wax wax +audio/x-ms-wma wma +audio/x-pn-realaudio ram ra +audio/x-pn-realaudio-plugin rmp +audio/x-wav wav +chemical/x-cdx cdx +chemical/x-cif cif +chemical/x-cmdf cmdf +chemical/x-cml cml +chemical/x-csml csml +chemical/x-pdb +chemical/x-xyz xyz +image/bmp bmp +image/cgm cgm +image/example +image/fits +image/g3fax g3 +image/gif gif +image/ief ief +image/jp2 +image/jpeg jpeg jpg jpe +image/jpm +image/jpx +image/naplps +image/png png +image/prs.btif btif +image/prs.pti +image/svg+xml svg 
svgz +image/t38 +image/tiff tiff tif +image/tiff-fx +image/vnd.adobe.photoshop psd +image/vnd.cns.inf2 +image/vnd.djvu djvu djv +image/vnd.dwg dwg +image/vnd.dxf dxf +image/vnd.fastbidsheet fbs +image/vnd.fpx fpx +image/vnd.fst fst +image/vnd.fujixerox.edmics-mmr mmr +image/vnd.fujixerox.edmics-rlc rlc +image/vnd.globalgraphics.pgb +image/vnd.microsoft.icon +image/vnd.mix +image/vnd.ms-modi mdi +image/vnd.net-fpx npx +image/vnd.radiance +image/vnd.sealed.png +image/vnd.sealedmedia.softseal.gif +image/vnd.sealedmedia.softseal.jpg +image/vnd.svf +image/vnd.wap.wbmp wbmp +image/vnd.xiff xif +image/x-cmu-raster ras +image/x-cmx cmx +image/x-freehand fh fhc fh4 fh5 fh7 +image/x-icon ico +image/x-pcx pcx +image/x-pict pic pct +image/x-portable-anymap pnm +image/x-portable-bitmap pbm +image/x-portable-graymap pgm +image/x-portable-pixmap ppm +image/x-rgb rgb +image/x-xbitmap xbm +image/x-xpixmap xpm +image/x-xwindowdump xwd +message/cpim +message/delivery-status +message/disposition-notification +message/example +message/external-body +message/global +message/global-delivery-status +message/global-disposition-notification +message/global-headers +message/http +message/imdn+xml +message/news +message/partial +message/rfc822 eml mime +message/s-http +message/sip +message/sipfrag +message/tracking-status +message/vnd.si.simp +model/example +model/iges igs iges +model/mesh msh mesh silo +model/vnd.dwf dwf +model/vnd.flatland.3dml +model/vnd.gdl gdl +model/vnd.gs-gdl +model/vnd.gs.gdl +model/vnd.gtw gtw +model/vnd.moml+xml +model/vnd.mts mts +model/vnd.parasolid.transmit.binary +model/vnd.parasolid.transmit.text +model/vnd.vtu vtu +model/vrml wrl vrml +multipart/alternative +multipart/appledouble +multipart/byteranges +multipart/digest +multipart/encrypted +multipart/example +multipart/form-data +multipart/header-set +multipart/mixed +multipart/parallel +multipart/related +multipart/report +multipart/signed +multipart/voice-message +text/calendar ics ifb +text/css css 
+text/csv csv +text/directory +text/dns +text/ecmascript +text/enriched +text/example +text/html html htm +text/javascript +text/parityfec +text/plain txt text conf def list log in +text/prs.fallenstein.rst +text/prs.lines.tag dsc +text/red +text/rfc822-headers +text/richtext rtx +text/rtf +text/rtp-enc-aescm128 +text/rtx +text/sgml sgml sgm +text/t140 +text/tab-separated-values tsv +text/troff t tr roff man me ms +text/ulpfec +text/uri-list uri uris urls +text/vnd.abc +text/vnd.curl curl +text/vnd.curl.dcurl dcurl +text/vnd.curl.scurl scurl +text/vnd.curl.mcurl mcurl +text/vnd.dmclientscript +text/vnd.esmertec.theme-descriptor +text/vnd.fly fly +text/vnd.fmi.flexstor flx +text/vnd.graphviz gv +text/vnd.in3d.3dml 3dml +text/vnd.in3d.spot spot +text/vnd.iptc.newsml +text/vnd.iptc.nitf +text/vnd.latex-z +text/vnd.motorola.reflex +text/vnd.ms-mediapackage +text/vnd.net2phone.commcenter.command +text/vnd.si.uricatalogue +text/vnd.sun.j2me.app-descriptor jad +text/vnd.trolltech.linguist +text/vnd.wap.si +text/vnd.wap.sl +text/vnd.wap.wml wml +text/vnd.wap.wmlscript wmls +text/x-asm s asm +text/x-c c cc cxx cpp h hh dic +text/x-fortran f for f77 f90 +text/x-pascal p pas +text/x-java-source java +text/x-setext etx +text/x-uuencode uu +text/x-vcalendar vcs +text/x-vcard vcf +text/xml +text/xml-external-parsed-entity +video/3gpp 3gp +video/3gpp-tt +video/3gpp2 3g2 +video/bmpeg +video/bt656 +video/celb +video/dv +video/example +video/h261 h261 +video/h263 h263 +video/h263-1998 +video/h263-2000 +video/h264 h264 +video/jpeg jpgv +video/jpeg2000 +video/jpm jpm jpgm +video/mj2 mj2 mjp2 +video/mp1s +video/mp2p +video/mp2t +video/mp4 mp4 mp4v mpg4 +video/mp4v-es +video/mpeg mpeg mpg mpe m1v m2v +video/mpeg4-generic +video/mpv +video/nv +video/ogg ogv +video/parityfec +video/pointer +video/quicktime qt mov +video/raw +video/rtp-enc-aescm128 +video/rtx +video/smpte292m +video/ulpfec +video/vc1 +video/vnd.cctv +video/vnd.dlna.mpeg-tts +video/vnd.fvt fvt +video/vnd.hns.video 
+video/vnd.iptvforum.1dparityfec-1010 +video/vnd.iptvforum.1dparityfec-2005 +video/vnd.iptvforum.2dparityfec-1010 +video/vnd.iptvforum.2dparityfec-2005 +video/vnd.iptvforum.ttsavc +video/vnd.iptvforum.ttsmpeg2 +video/vnd.motorola.video +video/vnd.motorola.videop +video/vnd.mpegurl mxu m4u +video/vnd.ms-playready.media.pyv pyv +video/vnd.nokia.interleaved-multimedia +video/vnd.nokia.videovoip +video/vnd.objectvideo +video/vnd.sealed.mpeg1 +video/vnd.sealed.mpeg4 +video/vnd.sealed.swf +video/vnd.sealedmedia.softseal.mov +video/vnd.vivo viv +video/x-f4v f4v +video/x-fli fli +video/x-flv flv +video/x-m4v m4v +video/x-ms-asf asf asx +video/x-ms-wm wm +video/x-ms-wmv wmv +video/x-ms-wmx wmx +video/x-ms-wvx wvx +video/x-msvideo avi +video/x-sgi-movie movie +x-conference/x-cooltalk ice \ No newline at end of file diff --git a/extensions/http/src/main/resources/uk/co/tfd/sm/http/content/mime.types b/extensions/http/src/main/resources/uk/co/tfd/sm/http/content/mime.types new file mode 100644 index 00000000..23894e03 --- /dev/null +++ b/extensions/http/src/main/resources/uk/co/tfd/sm/http/content/mime.types @@ -0,0 +1,31 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +# +# Sling extensions to the core_mime.types +# + +application/compress z +application/msexcel xls xlsx +application/mspowerpoint ppt pptx +application/octet-stream class exe msi +application/photoshop psd +image/pict pict +text/plain apt +text/rtf rtf diff --git a/extensions/http/src/test/java/uk/co/tfd/sm/http/ServerProtectionServiceImplTest.java b/extensions/http/src/test/java/uk/co/tfd/sm/http/ServerProtectionServiceImplTest.java new file mode 100644 index 00000000..281d141e --- /dev/null +++ b/extensions/http/src/test/java/uk/co/tfd/sm/http/ServerProtectionServiceImplTest.java @@ -0,0 +1,128 @@ +package uk.co.tfd.sm.http; + +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.security.NoSuchAlgorithmException; +import java.util.Map; +import java.util.Vector; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang.StringUtils; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; + +import uk.co.tfd.sm.api.http.ServerProtectionService.Action; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; + +public class ServerProtectionServiceImplTest { + + private ServerProtectionServiceImpl serverProtectionServiceImpl; + + @Before + public void before() throws NoSuchAlgorithmException, UnsupportedEncodingException { + serverProtectionServiceImpl = new ServerProtectionServiceImpl(); + Map properties = ImmutableMap.of( + "secret", (Object)"change in production", + "whitelist", new String[]{"/whitelist"}, + "hosts", new String[]{ + "referer;localhost:8080;http://localhost:8080/", + "csrf;localhost:8080;", + "usercontent;localhost:8082;", + "redirect;localhost:8080;http://localhost:8082" }); + serverProtectionServiceImpl.activate(properties); + } + + + @Test + public void testSimpleAction() { + Map noParameters = ImmutableMap.of(); + Vector noReferers = new Vector(); + // gets to the application host are ok + 
Assert.assertEquals(Action.OK, serverProtectionServiceImpl.checkAction(getRequest("GET","http://localhost:8080/testng123",null,noParameters, noReferers))); + // check the content host is ok + Assert.assertEquals(Action.OK, serverProtectionServiceImpl.checkAction(getRequest("GET","http://localhost:8082/testng123",null,noParameters, noReferers))); + // posts to application hosts need referrers + Assert.assertEquals(Action.FORBID, serverProtectionServiceImpl.checkAction(getRequest("POST","http://localhost:8080/testng123",null,noParameters, noReferers))); + // cant do gets to hosts that are not configured + Assert.assertEquals(Action.FORBID, serverProtectionServiceImpl.checkAction(getRequest("GET","http://invalidHost/testng123",null,noParameters, noReferers))); + } + + @Test + public void testPostReferer() { + Map noParameters = ImmutableMap.of(); + Vector noReferers = new Vector(); + Vector referers = new Vector(); + Assert.assertEquals(Action.FORBID, serverProtectionServiceImpl.checkAction(getRequest("POST","http://localhost:8080/testng123",null,noParameters, noReferers))); + referers.clear(); + referers.add("/dada"); + Assert.assertEquals(Action.OK, serverProtectionServiceImpl.checkAction(getRequest("POST","http://localhost:8080/testng123",null,noParameters, referers))); + referers.clear(); + referers.add("http://localhost:8080/OkLocation"); + Assert.assertEquals(Action.OK, serverProtectionServiceImpl.checkAction(getRequest("POST","http://localhost:8080/testng123",null,noParameters, referers))); + referers.clear(); + referers.add("http://localhost:8081/BadPort"); + Assert.assertEquals(Action.FORBID, serverProtectionServiceImpl.checkAction(getRequest("POST","http://localhost:8080/testng123",null,noParameters, referers))); + referers.clear(); + referers.add("https://localhost:8080/BadProtocol"); + Assert.assertEquals(Action.FORBID, serverProtectionServiceImpl.checkAction(getRequest("POST","http://localhost:8080/testng123",null,noParameters, referers))); + 
referers.add("https://localhost:8080/BadProtocol"); + Assert.assertEquals(Action.FORBID, serverProtectionServiceImpl.checkAction(getRequest("POST","http://asdsad/testng123",null,noParameters, noReferers))); + } + + @Test + public void testPostCSRF() { + Map noParameters = ImmutableMap.of(); + Vector noReferers = new Vector(); + HttpServletRequest request = getRequest("GET","http://localhost:8080/testng123",null,noParameters, noReferers); + String token = serverProtectionServiceImpl.getCSRFToken(request); + Assert.assertNotNull(token); + Assert.assertEquals(Action.OK, serverProtectionServiceImpl.checkAction(getRequest("POST","http://localhost:8080/testng123",null, ImmutableMap.of("_csrft",token ), noReferers))); + Assert.assertEquals(Action.FORBID, serverProtectionServiceImpl.checkAction(getRequest("POST","http://localhost:8080/testng123",null, ImmutableMap.of("_csrft",token+"bad" ), noReferers))); + } + + @Test + public void testRedirect() throws UnsupportedEncodingException { + Map noParameters = ImmutableMap.of(); + Vector noReferers = new Vector(); + HttpServletRequest badRequest = getRequest("POST","http://localhost:8080/testng123","x=1&y=2",noParameters, noReferers); + String badRequestUrl = serverProtectionServiceImpl.getRedirectIdentityUrl(badRequest, "ieb"); + Assert.assertNull(badRequestUrl); + HttpServletRequest request = getRequest("GET","http://localhost:8080/testng123","x=1&y=2",noParameters, noReferers); + String redirectUrl = serverProtectionServiceImpl.getRedirectIdentityUrl(request, "ieb"); + Assert.assertNotNull(redirectUrl); + Map params = Maps.newHashMap(); + String queryString = redirectUrl.substring(redirectUrl.indexOf("?")+1); + String redirectURI = redirectUrl.substring(0,redirectUrl.indexOf("?")); + for ( String kv : StringUtils.split(queryString,"&")) { + String[] pp = StringUtils.split(kv,"="); + params.put(URLDecoder.decode(pp[0], "UTF-8"), URLDecoder.decode(pp[1], "UTF-8")); + } + + Assert.assertEquals("ieb", 
serverProtectionServiceImpl.getIdentity(getRequest("GET", redirectURI, queryString, params, noReferers))); + Assert.assertNull(serverProtectionServiceImpl.getIdentity(getRequest("POST", redirectURI, queryString, params, noReferers))); + Assert.assertNull(serverProtectionServiceImpl.getIdentity(getRequest("GET", "http://localhost:8080/testng123Bad", queryString, params, noReferers))); + params.put("_hmac", "bad"+params.get("_hmac")); + Assert.assertNull(serverProtectionServiceImpl.getIdentity(getRequest("GET", redirectURI, queryString, params, noReferers))); + } + + private HttpServletRequest getRequest(String method, String url, String queryString, Map parameters, Vector referers) { + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + Mockito.when(request.getMethod()).thenReturn(method); + Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer(url)); + String uri = url.substring(url.indexOf("/",url.indexOf("/")+2)); + String host = url.substring(url.indexOf("/")+2, url.indexOf("/",url.indexOf("/")+2)); + Mockito.when(request.getRequestURI()).thenReturn(uri); + Mockito.when(request.getQueryString()).thenReturn(queryString); + Mockito.when(request.getParameter("_hmac")).thenReturn(parameters.get("_hmac")); + Mockito.when(request.getParameter("_csrft")).thenReturn(parameters.get("_csrft")); + Mockito.when(request.getHeader("Host")).thenReturn(host); + Mockito.when(request.getHeaders("Referer")).thenReturn(referers.elements()); + + return request; + } +} diff --git a/extensions/http/src/test/java/uk/co/tfd/sm/http/batch/BatchServletTest.java b/extensions/http/src/test/java/uk/co/tfd/sm/http/batch/BatchServletTest.java new file mode 100644 index 00000000..91d63e88 --- /dev/null +++ b/extensions/http/src/test/java/uk/co/tfd/sm/http/batch/BatchServletTest.java @@ -0,0 +1,125 @@ +/** + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.http.batch; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static uk.co.tfd.sm.http.batch.BatchServlet.REQUESTS_PARAMETER; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintWriter; +import java.util.Map; + +import javax.servlet.RequestDispatcher; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import junit.framework.Assert; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.http.batch.BatchServlet; +import uk.co.tfd.sm.memory.ehcache.CacheManagerServiceImpl; +import uk.co.tfd.sm.template.TemplateServiceImpl; + +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonElement; +import com.google.gson.JsonParser; + +/** + * + */ +public class BatchServletTest { + + private static final Logger LOGGER = LoggerFactory + .getLogger(BatchServletTest.class); + + private BatchServlet servlet; + + @Mock + private HttpServletRequest request; + + @Mock + 
private HttpServletResponse response; + + public BatchServletTest() { + MockitoAnnotations.initMocks(this); + } + + @Before + public void setUp() throws Exception { + CacheManagerServiceImpl cacheManagerServiceImpl = new CacheManagerServiceImpl(); + Map properties = ImmutableMap.of(); + cacheManagerServiceImpl.activate(properties); + TemplateServiceImpl templateServiceImpl = new TemplateServiceImpl(); + templateServiceImpl.activate(properties); + servlet = new BatchServlet(); + servlet.cacheManagerService = cacheManagerServiceImpl; + servlet.templateService = templateServiceImpl; + servlet.activate(properties); + + } + + @Test + public void testInvalidRequest() throws ServletException, IOException { + when(request.getParameter(REQUESTS_PARAMETER)).thenReturn( + "marlformedparameter"); + servlet.doGet(request, response); + verify(response).sendError(HttpServletResponse.SC_BAD_REQUEST, + "Failed to parse the " + REQUESTS_PARAMETER + " parameter"); + } + + @Test + public void testSimpleRequest() throws Exception { + String json = "[{\"url\" : \"/foo/bar\",\"method\" : \"POST\",\"parameters\" : {\"val\" : 123,\"val@TypeHint\" : \"Long\"}}]"; + + when(request.getParameter(REQUESTS_PARAMETER)).thenReturn(json); + when(request.getRemoteUser()).thenReturn("admin"); + + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + PrintWriter writer = new PrintWriter(baos); + + RequestDispatcher dispatcher = mock(RequestDispatcher.class); + when(request.getRequestDispatcher(Mockito.anyString())).thenReturn( + dispatcher); + when(response.getWriter()).thenReturn(writer); + servlet.doPost(request, response); + String result = baos.toString("UTF-8"); + JsonParser jsonParser = new JsonParser(); + JsonElement parsedResult = jsonParser.parse(result); + LOGGER.info("Result {} {} ", result, parsedResult); + + Assert.assertTrue(parsedResult.isJsonObject()); + Assert.assertNotNull(parsedResult.getAsJsonObject().get("results")); + 
Assert.assertTrue(parsedResult.getAsJsonObject().get("results") + .isJsonArray()); + Assert.assertTrue(parsedResult.getAsJsonObject().get("results") + .getAsJsonArray().get(0).getAsJsonObject().get("success") + .getAsBoolean()); + } + +} diff --git a/extensions/http/src/test/resources/widgets/badwidget/badwidget.html b/extensions/http/src/test/resources/widgets/badwidget/badwidget.html new file mode 100644 index 00000000..6d0f2f67 --- /dev/null +++ b/extensions/http/src/test/resources/widgets/badwidget/badwidget.html @@ -0,0 +1 @@ +The foo went under the bar. \ No newline at end of file diff --git a/extensions/http/src/test/resources/widgets/badwidget/config.json b/extensions/http/src/test/resources/widgets/badwidget/config.json new file mode 100644 index 00000000..54ed3961 --- /dev/null +++ b/extensions/http/src/test/resources/widgets/badwidget/config.json @@ -0,0 +1 @@ +This is not a valid widget config file! \ No newline at end of file diff --git a/extensions/http/src/test/resources/widgets/twitter/bundles/default.properties b/extensions/http/src/test/resources/widgets/twitter/bundles/default.properties new file mode 100644 index 00000000..60053b7f --- /dev/null +++ b/extensions/http/src/test/resources/widgets/twitter/bundles/default.properties @@ -0,0 +1,15 @@ +AN_ERROR_OCCURED_WHEN_SENDING_THE_STATUS_TO_THE_SERVER= An error occurend when sending the status to the server +COULD_NOT_FIND_LAST_STATUS_FOR= Couldn't find the last status for +COULD_NOT_UPDATE_THE_TWITTER_STATUS= Could not update the twitter status +GET_STATUS_FROM_TWITTER= Get status from twitter +NO_STATUS_FROM_TWITTER_FOUND= No status from twitter found +SCREEN_NAME= Screen name +SET_STATUS_TO_TWITTER= Set status to twitter +OK= OK +PASSWORD= Password +PLEASE_INSERT_YOUR_PASSWORD= Please insert your password +PLEASE_INSERT_YOUR_TWITTER_NAME= Please insert your twitter name +WIDGET_TITLE= Twitter +YOUR_SAKAI_STATUS_IS_EMPTY= Your sakai status is empty +YOUR_STATUS_HAS_BEEN_SUCCESSFULLY_UPDATED= 
Your status has been succesfully updated +YOUR_TWITTER_STATUS_HAS_BEEN_SUCCESSFULLY_UPDATED= Your twitter status has been succesfully updated diff --git a/extensions/http/src/test/resources/widgets/twitter/bundles/en_US.properties b/extensions/http/src/test/resources/widgets/twitter/bundles/en_US.properties new file mode 100644 index 00000000..60053b7f --- /dev/null +++ b/extensions/http/src/test/resources/widgets/twitter/bundles/en_US.properties @@ -0,0 +1,15 @@ +AN_ERROR_OCCURED_WHEN_SENDING_THE_STATUS_TO_THE_SERVER= An error occurend when sending the status to the server +COULD_NOT_FIND_LAST_STATUS_FOR= Couldn't find the last status for +COULD_NOT_UPDATE_THE_TWITTER_STATUS= Could not update the twitter status +GET_STATUS_FROM_TWITTER= Get status from twitter +NO_STATUS_FROM_TWITTER_FOUND= No status from twitter found +SCREEN_NAME= Screen name +SET_STATUS_TO_TWITTER= Set status to twitter +OK= OK +PASSWORD= Password +PLEASE_INSERT_YOUR_PASSWORD= Please insert your password +PLEASE_INSERT_YOUR_TWITTER_NAME= Please insert your twitter name +WIDGET_TITLE= Twitter +YOUR_SAKAI_STATUS_IS_EMPTY= Your sakai status is empty +YOUR_STATUS_HAS_BEEN_SUCCESSFULLY_UPDATED= Your status has been succesfully updated +YOUR_TWITTER_STATUS_HAS_BEEN_SUCCESSFULLY_UPDATED= Your twitter status has been succesfully updated diff --git a/extensions/http/src/test/resources/widgets/twitter/bundles/nl_NL.properties b/extensions/http/src/test/resources/widgets/twitter/bundles/nl_NL.properties new file mode 100644 index 00000000..adedebe0 --- /dev/null +++ b/extensions/http/src/test/resources/widgets/twitter/bundles/nl_NL.properties @@ -0,0 +1,15 @@ +AN_ERROR_OCCURED_WHEN_SENDING_THE_STATUS_TO_THE_SERVER= De twitter status kan niet veranderd worden wegens een fout op de server +COULD_NOT_FIND_LAST_STATUS_FOR= Kon de status voor de volgende persoon niet vinden= +COULD_NOT_UPDATE_THE_TWITTER_STATUS= De twitter status kan niet veranderd worden +GET_STATUS_FROM_TWITTER= Gebruik een status van 
twitter +NO_STATUS_FROM_TWITTER_FOUND= Er is geen status op twitter gevonden +SCREEN_NAME= Gebruikersnaam +SET_STATUS_TO_TWITTER= Verander je twitter status +OK= OK +PASSWORD= Paswoord +PLEASE_INSERT_YOUR_PASSWORD= Gelieve je paswoord in te vullen +PLEASE_INSERT_YOUR_TWITTER_NAME= Gelieve je twitter naam in te vullen +WIDGET_TITLE= Twitter +YOUR_SAKAI_STATUS_IS_EMPTY= Uw Sakai status is leeg +YOUR_STATUS_HAS_BEEN_SUCCESSFULLY_UPDATED= Uw status is geupdated +YOUR_TWITTER_STATUS_HAS_BEEN_SUCCESSFULLY_UPDATED= Uw twitter status is geupdated diff --git a/extensions/http/src/test/resources/widgets/twitter/bundles/zh_CN.properties b/extensions/http/src/test/resources/widgets/twitter/bundles/zh_CN.properties new file mode 100644 index 00000000..24c85b58 --- /dev/null +++ b/extensions/http/src/test/resources/widgets/twitter/bundles/zh_CN.properties @@ -0,0 +1,6 @@ +GET_STATUS_FROM_TWITTER= 从Twitter中获取状态信息 +OK= 确定 +PASSWORD= 密码 +SCREEN_NAME= 屏幕用户名 +SET_STATUS_TO_TWITTER= 设置状态信息到Twitter中 +WIDGET_TITLE= Twitter diff --git a/extensions/http/src/test/resources/widgets/twitter/config.json b/extensions/http/src/test/resources/widgets/twitter/config.json new file mode 100644 index 00000000..ddb6225e --- /dev/null +++ b/extensions/http/src/test/resources/widgets/twitter/config.json @@ -0,0 +1,14 @@ +{ + "description":"Twitter Widget", + "hasSettings":true, + "i18n": { + "default": "/devwidgets/twitter/bundles/default.json", + "en_US": "/devwidgets/twitter/bundles/en_US.json", + "nl_NL": "/devwidgets/twitter/bundles/nl_NL.json", + "zh_CN": "/devwidgets/twitter/bundles/zh_CN.json" + }, + "id":"twitter", + "name":"Twitter", + "personalportal":true, + "url":"/devwidgets/twitter/twitter.html" +} \ No newline at end of file diff --git a/extensions/http/src/test/resources/widgets/twitter/css/twitter.css b/extensions/http/src/test/resources/widgets/twitter/css/twitter.css new file mode 100644 index 00000000..5a09a939 --- /dev/null +++ 
b/extensions/http/src/test/resources/widgets/twitter/css/twitter.css @@ -0,0 +1,36 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ + +/* Twitter CSS */ +/* Main Container */ +#twitter_main_container {padding:10px;} +#twitter_main_container label {padding:2px 0;margin-bottom:2px;} +#twitter_main_container input[type=radio] {clear:left;float:left;margin-top:5px;} +.twitter_radiocheck_label {float:left;margin:4px 0 0 5px;padding:1px !important;} + +/* Sub Container */ +#twitter_sub_container {clear:both;margin-top:30px;} +.twitter_input_label {display:block;clear:both;} + +/* Message Container */ +.twitter_error {color:#ff0000 !important;} +.twitter_info {color:#4aa02c !important;} +.twitter_input_text {border:1px solid #dddddd;padding:3px} + +/* Icon */ +.twitter .widget_title {background:url("../images/twitter.png") no-repeat scroll 17px 17px transparent;} \ No newline at end of file diff --git a/extensions/http/src/test/resources/widgets/twitter/images/twitter.png b/extensions/http/src/test/resources/widgets/twitter/images/twitter.png new file mode 100644 index 00000000..9c0a6e3f Binary files /dev/null and b/extensions/http/src/test/resources/widgets/twitter/images/twitter.png differ diff --git 
a/extensions/http/src/test/resources/widgets/twitter/javascript/twitter.js b/extensions/http/src/test/resources/widgets/twitter/javascript/twitter.js new file mode 100644 index 00000000..153b03fe --- /dev/null +++ b/extensions/http/src/test/resources/widgets/twitter/javascript/twitter.js @@ -0,0 +1,318 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +/*global $, sdata, Config */ + +var sakai = sakai || {}; + +sakai.twitter = function(tuid, showSettings){ + + + ////////////////////////////// + // Configuration Variables // + ///////////////////////////// + + var currentSubContainer = ""; // The current subcontainer (get/set) + var twitterinfo; // JSON object containing all the settings + information for this widget + + + /////////////////// + // CSS Selectors // + /////////////////// + + var $rootel = $("#" + tuid); + + var $twitter_get_status = $("#twitter_get_status", $rootel); + var $twitter_main_container = $("#twitter_main_container", $rootel); + var $twitter_message_container = $("#twitter_message_container", $rootel); + var $twitter_set_status = $("#twitter_set_status", $rootel); + var $twitter_sub_container = $("#twitter_sub_container", $rootel); + var $twitter_template_get_status = $("#twitter_template_get_status", $rootel); + var $twitter_template_message = $("#twitter_template_message", $rootel); + var $twitter_template_set_status = $("#twitter_template_set_status", $rootel); + + /** + * Reset the values of the JSON object + */ + var resetValues = function(){ + twitterinfo = { + error: "", + info: "", + screen_name: "", + password: "" + }; + }; + + /** + * Render the template of a twitter container + * @param {String} container Container that will be rendered + */ + var renderTemplate = function(container){ + switch (container) { + case "get_status": + currentSubContainer = "get"; + $.TemplateRenderer($twitter_template_get_status, twitterinfo, $twitter_sub_container); + break; + case "set_status": + currentSubContainer = "set"; + $.TemplateRenderer($twitter_template_set_status, twitterinfo, $twitter_sub_container); + break; + case "message": + $.TemplateRenderer($twitter_template_message, twitterinfo, $twitter_message_container); + break; + } + }; + + /** + * Sets the error message to the json object and renders the template + * @param {String} errorInput Error message + */ + var setError = 
function(errorInput){ + twitterinfo.error = errorInput; + renderTemplate("message"); + }; + + /** + * Sets the info message to the json object and renders the template + * @param {String} infoInput Info message + */ + var setInfo = function(infoInput){ + twitterinfo.info = infoInput; + renderTemplate("message"); + }; + + /** + * Clear the info and error messages + */ + var clearErrorAndInfo = function(){ + setError(""); + setInfo(""); + }; + + /** + * Change the status for the user + */ + var changeLocalStatus = function(){ + if (twitterinfo.status) { + var basic = { + "status": twitterinfo.status + }; + + var data = { + "basic": $.toJSON(basic), + "_charset_": "utf-8" + }; + + $.ajax({ + url: "/_user" + sakai.data.me.profile.path + "/public/authprofile.json", + type: "POST", + data: data, + success: function(data){ + setInfo("successfullyupdated"); + }, + error: function(xhr, textStatus, thrownError){ + setError("sendstatuserror"); + } + }); + } + else { + setError("nostatusfound"); + } + }; + + /** + * Parse the twitter status object + * @param {String} response JSON response from the server + * @param {Boolean} exists Check if the twitter status exists + */ + var parseTwitterStatus = function(response, exists){ + if (exists && response.length > 0) { + twitterinfo.status = response[0].text; + changeLocalStatus(); + } + else { + setError("nolaststatus"); + } + }; + + /** + * Parse the response after the update + * @param {Object} response JSON response that you get back after updating the status on the server + * @param {Boolean} exists Whether the update was successful or not + */ + var parseTwitterResponse = function(response, exists){ + if (exists) { + setInfo("successfullyupdatedtwitter"); + } + else { + setError("noupdate"); + } + }; + + /** + * Set the screenname of the JSON object + * @param {Boolean} check If true, perform a check if the field is empty or not + */ + var setScreenName = function(check){ + var val = $("#twitter_input_screen_name", 
$rootel).val(); + if (!check) { + twitterinfo.screen_name = val; + return true; + } + else { + if (!val || val.replace(/ /g, "") === "") { + setError("inserttwittername"); + return false; + } + else { + twitterinfo.screen_name = val; + return true; + } + } + }; + + /** + * Set the password to the json object + */ + var setPassword = function(){ + var val = $("#twitter_input_password", $rootel).val(); + if (!val || val.replace(/ /g, "") === "") { + setError("inserttwitterpassword"); + return false; + } + else { + twitterinfo.password = val; + return true; + } + }; + + /** + * Get the status from twitter + */ + var getStatusFromTwitter = function(){ + if (setScreenName(true)) { + var oPostData = { + user: twitterinfo.screen_name + }; + $.ajax({ + url: sakai.config.URL.TWITTER_GET_URL, + success: function(data){ + parseTwitterStatus(data, true); + }, + error: function(xhr, textStatus, thrownError){ + parseTwitterStatus(xhr.status, false); + }, + data: oPostData + }); + } + }; + + /** + * Set the status to twitter + */ + var setStatusToTwitter = function(){ + if (setScreenName(true) && setPassword()) { + var currentBasic = sakai.data.me.profile.basic; + if (currentBasic) { + currentBasic = $.parseJSON(currentBasic); + } + if (currentBasic.status) { + + var oPostData = { + ":basic-user": twitterinfo.screen_name, + ":basic-password": twitterinfo.password, + status: currentBasic.status, + "_charset_": "utf-8" + }; + + $.ajax({ + url: sakai.config.URL.TWITTER_POST_URL, + type: "POST", + success: function(data){ + parseTwitterResponse(data, true); + }, + error: function(xhr, textStatus, thrownError){ + parseTwitterResponse(xhr.status, false); + }, + data: oPostData + }); + } + else { + setError("emptysakaistatus"); + } + } + }; + + /** + * Show a sub container + * @param {String} target Id of the container that needs to be shown + */ + var showSubContainer = function(target){ + if (currentSubContainer !== target) { + setScreenName(false); + switch (target) { + case "get": + 
renderTemplate("get_status"); + break; + case "set": + renderTemplate("set_status"); + break; + } + clearErrorAndInfo(); + } + }; + + /** + * Add binding to the various elements in the twitter widget + */ + var addBinding = function(){ + + // Bind the submit event on the get status form + $twitter_get_status.live("submit", function(){ + clearErrorAndInfo(); + getStatusFromTwitter(); + return false; + }); + + // Bind the submit event on the set status form + $twitter_set_status.live("submit", function(){ + clearErrorAndInfo(); + setStatusToTwitter(); + return false; + }); + + // Bind the radiobuttons to switch between 2 views + $("input[name=twitter_input_get_set]").bind("click", function(e, ui){ + showSubContainer(e.target.id.replace("twitter_input_", "")); + }); + }; + + /** + * Function that will be launched if the widget is loaded + */ + var init = function(){ + resetValues(); + renderTemplate("get_status"); + addBinding(); + renderTemplate("error"); + + $twitter_main_container.show(); + }; + init(); +}; + +sakai.api.Widgets.widgetLoader.informOnLoad("twitter"); \ No newline at end of file diff --git a/extensions/http/src/test/resources/widgets/twitter/twitter.html b/extensions/http/src/test/resources/widgets/twitter/twitter.html new file mode 100644 index 00000000..263bd0fa --- /dev/null +++ b/extensions/http/src/test/resources/widgets/twitter/twitter.html @@ -0,0 +1,77 @@ + + + + + + + +
    +
    + + + +
    +
    +
    + + + \ No newline at end of file diff --git a/extensions/integration/pom.xml b/extensions/integration/pom.xml new file mode 100644 index 00000000..dbf303c5 --- /dev/null +++ b/extensions/integration/pom.xml @@ -0,0 +1,76 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + uk.co.tfd.sm.integration + jar + 0.1-SNAPSHOT + Sparse Map :: Integration Tests + Provides Integration Tests. + + + org.slf4j + slf4j-simple + 1.5.10 + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + junit + junit + 4.4 + jar + compile + + + org.mockito + mockito-all + 1.8.5 + jar + test + + + com.google.code.gson + gson + 1.7.1 + jar + compile + + + commons-io + commons-io + 2.1 + + + + org.apache.httpcomponents + httpclient + 4.1.2 + jar + compile + + + org.apache.httpcomponents + httpmime + 4.1.2 + jar + compile + + + diff --git a/extensions/integration/src/main/java/uk/co/tfd/sm/integration/HttpTestUtils.java b/extensions/integration/src/main/java/uk/co/tfd/sm/integration/HttpTestUtils.java new file mode 100644 index 00000000..c59889b4 --- /dev/null +++ b/extensions/integration/src/main/java/uk/co/tfd/sm/integration/HttpTestUtils.java @@ -0,0 +1,72 @@ +package uk.co.tfd.sm.integration; + +import java.io.IOException; + +import org.apache.commons.io.IOUtils; +import org.apache.http.HttpResponse; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.impl.client.DefaultHttpClient; +import org.junit.Assert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.JsonElement; +import com.google.gson.JsonParser; + +public class HttpTestUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(HttpTestUtils.class); + private HttpClient defaultHttpClient; + + public HttpTestUtils() { + defaultHttpClient = new DefaultHttpClient(); + } + + 
public JsonElement execute(HttpUriRequest post, int code, + String contentType) throws ClientProtocolException, IOException { + return execute(post, code, contentType, false); + } + + public JsonElement execute(HttpUriRequest post, int code, + String contentType, boolean echo) throws ClientProtocolException, IOException { + post.setHeader("Referer", "/integrationtests"); + HttpResponse response = defaultHttpClient.execute(post); + Assert.assertEquals(code, response.getStatusLine().getStatusCode()); + if (code >= 200 && code < 300) { + Assert.assertEquals(contentType, + response.getHeaders("Content-Type")[0].getValue()); + String jsonBody = IOUtils.toString(response.getEntity() + .getContent()); + if ( echo ) { + LOGGER.info("Got {} ", jsonBody); + } + JsonParser parser = new JsonParser(); + return parser.parse(jsonBody); + } + IOUtils.toString(response.getEntity().getContent()); + + return null; + } + + public JsonElement get(String uri, int code, + String contentType) throws ClientProtocolException, IOException { + return execute(new HttpGet(uri), code, contentType); + } + + public JsonElement get(String uri, int code, + String contentType, boolean echo) throws ClientProtocolException, IOException { + return execute(new HttpGet(uri), code, contentType, echo ); + } + + public HttpResponse execute(HttpUriRequest request) throws ClientProtocolException, IOException { + return defaultHttpClient.execute(request); + } + public HttpResponse get(String request) throws ClientProtocolException, IOException { + return defaultHttpClient.execute(new HttpGet(request)); + } + + +} diff --git a/extensions/integration/src/main/java/uk/co/tfd/sm/integration/IntegrationServer.java b/extensions/integration/src/main/java/uk/co/tfd/sm/integration/IntegrationServer.java new file mode 100644 index 00000000..528b4c43 --- /dev/null +++ b/extensions/integration/src/main/java/uk/co/tfd/sm/integration/IntegrationServer.java @@ -0,0 +1,80 @@ +package uk.co.tfd.sm.integration; + +import 
java.io.IOException; +import java.net.ConnectException; +import java.net.MalformedURLException; + +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.impl.client.DefaultHttpClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class IntegrationServer { + + private static final Logger LOGGER = LoggerFactory + .getLogger(IntegrationServer.class); + public static final String BASEURL = "http://localhost:8080"; + private static boolean started = false; + + public static void start() throws IOException { + + if (!started) { + if (isStarted(0)) { + LOGGER.info("App already Running on port 8080"); + } else { + throw new IllegalStateException( + "Please start integration test server on port 8080"); + /* + * Removed to prevent a cyclic dependency and build order issues. + File f = new File("target/integrationserver"); + FileUtils.deleteDirectory(f.getAbsoluteFile()); + System.setProperty("sling.home", f.getAbsolutePath()); + NakamuraMain.main(new String[] { "-f", "target/integrationserver.log" }); + LOGGER.info("Started App"); + if (!isStarted(60000)) { + } + */ + } + started = true; + } + } + + private static boolean isStarted(int timeout) { + try { + long endTime = System.currentTimeMillis() + timeout + 1000; + while (System.currentTimeMillis() < endTime) { + try { + DefaultHttpClient client = new DefaultHttpClient(); + HttpGet get = new HttpGet(BASEURL + "/system/console"); + HttpResponse response = client.execute(get); + if (response.getStatusLine().getStatusCode() == 401) { + LOGGER.info("Server up, got 401 for admin interface"); + return true; + } else { + LOGGER.info( + "Server not up, got {} for admin interface ", + response.getStatusLine().getStatusCode()); + return false; + } + } catch (ConnectException e) { + if (System.currentTimeMillis() > endTime) { + return false; + } else { + LOGGER.info("Failed {} ", e.getMessage()); + Thread.sleep(1000); + } + } + } + } catch 
(InterruptedException e) { + LOGGER.error(e.getMessage(), e); + } catch (MalformedURLException e) { + LOGGER.error(e.getMessage(), e); + } catch (IOException e) { + LOGGER.error(e.getMessage(), e); + } finally { + } + return false; + } + +} diff --git a/extensions/integration/src/main/java/uk/co/tfd/sm/integration/JsonTestUtils.java b/extensions/integration/src/main/java/uk/co/tfd/sm/integration/JsonTestUtils.java new file mode 100644 index 00000000..108dc0af --- /dev/null +++ b/extensions/integration/src/main/java/uk/co/tfd/sm/integration/JsonTestUtils.java @@ -0,0 +1,120 @@ +package uk.co.tfd.sm.integration; + +import java.util.Set; + +import org.junit.Assert; + +import com.google.common.collect.Sets; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +public class JsonTestUtils { + public static void checkProperty(JsonObject json, String propertyName, + String propertyValue) { + JsonElement testProp = json.get(propertyName); + Assert.assertNotNull(testProp); + Assert.assertEquals(propertyValue, testProp.getAsString()); + } + public static void checkProperty(JsonObject json, String propertyName, + Boolean[] booleans) { + JsonArray jsonArray = toJsonArray(json.get(propertyName)); + Assert.assertEquals(booleans.length, jsonArray.size()); + for ( int i = 0; i < booleans.length; i++ ) { + Assert.assertEquals(booleans[i], jsonArray.get(i).getAsBoolean()); + } + } + + public static void checkProperty(JsonObject json, String propertyName, + boolean propertyValue) { + JsonElement testProp = json.get(propertyName); + Assert.assertNotNull(testProp); + Assert.assertEquals(propertyValue, testProp.getAsBoolean()); + } + + public static void checkProperty(JsonObject json, String propertyName, int[] values) { + JsonArray jsonArray = toJsonArray(json.get(propertyName)); + Assert.assertEquals(values.length, jsonArray.size()); + for ( int i = 0; i < values.length; i++ ) { + Assert.assertEquals(values[i], 
jsonArray.get(i).getAsInt()); + } + } + + public static void checkProperty(JsonObject json, String propertyName, String[] values) { + JsonArray jsonArray = toJsonArray(json.get(propertyName)); + Assert.assertEquals(values.length, jsonArray.size()); + for ( int i = 0; i < values.length; i++ ) { + Assert.assertEquals(values[i], jsonArray.get(i).getAsString()); + } + } + public static void checkProperty(JsonObject json, String propertyName, + Double[] values) { + JsonArray jsonArray = toJsonArray(json.get(propertyName)); + Assert.assertEquals(values.length, jsonArray.size()); + for ( int i = 0; i < values.length; i++ ) { + Assert.assertEquals((Double)values[i], (Double)jsonArray.get(i).getAsDouble()); + } + } + public static void checkProperty(JsonObject json, String propertyName, + double propertyValue) { + JsonElement testProp = json.get(propertyName); + Assert.assertNotNull(testProp); + Assert.assertEquals((Double)propertyValue, (Double)testProp.getAsDouble()); + } + + public static void checkProperty(JsonObject json, String propertyName, + Integer[] values) { + JsonArray jsonArray = toJsonArray(json.get(propertyName)); + Assert.assertEquals(values.length, jsonArray.size()); + for ( int i = 0; i < values.length; i++ ) { + Assert.assertEquals(values[i], (Integer)jsonArray.get(i).getAsInt()); + } + } + + public static void checkProperty(JsonObject json, String propertyName, + Integer propertyValue) { + JsonElement testProp = json.get(propertyName); + Assert.assertNotNull(testProp); + Assert.assertEquals((Integer)propertyValue, (Integer)testProp.getAsInt()); + } + + public static void checkProperty(JsonObject json, String propertyName, + Long[] values) { + JsonArray jsonArray = toJsonArray(json.get(propertyName)); + Assert.assertEquals(values.length, jsonArray.size()); + for ( int i = 0; i < values.length; i++ ) { + Assert.assertEquals(values[i], (Long)jsonArray.get(i).getAsLong()); + } + } + + public static void checkProperty(JsonObject json, String propertyName, + 
Long propertyValue) { + JsonElement testProp = json.get(propertyName); + Assert.assertNotNull(testProp); + Assert.assertEquals((Long)propertyValue, (Long)testProp.getAsLong()); + } + + public static Set toResponseSet(JsonElement jsonElement) { + Set result = Sets.newHashSet(); + JsonArray responseArray = toJsonArray(jsonElement); + for (int i = 0; i < responseArray.size(); i++) { + JsonElement je = responseArray.get(i); + result.add(je.getAsString()); + } + return result; + } + + public static JsonArray toJsonArray(JsonElement jsonElement) { + Assert.assertNotNull(jsonElement); + Assert.assertTrue(jsonElement.isJsonArray()); + return jsonElement.getAsJsonArray(); + } + + public static JsonObject toJsonObject(JsonElement jsonElement) { + Assert.assertNotNull(jsonElement); + Assert.assertTrue(jsonElement.isJsonObject()); + return jsonElement.getAsJsonObject(); + } + + +} diff --git a/extensions/jaxrs/pom.xml b/extensions/jaxrs/pom.xml new file mode 100644 index 00000000..d8ddff98 --- /dev/null +++ b/extensions/jaxrs/pom.xml @@ -0,0 +1,107 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + uk.co.tfd.sm.jaxrs + bundle + 0.1-SNAPSHOT + Sparse Map :: JAXRS Support + Provides JAX-RS support. 
+ + + + org.apache.felix + maven-scr-plugin + + + org.apache.felix + maven-bundle-plugin + true + + + sparse-map + uk.co.tfd.sm.api.jaxrs.*, javax.ws.rs.* + uk.co.tfd.sm.jaxrs.* + + !Acme.Serve, + !org.junit.*, + !org.apache.commons.httpclient.*, + !org.apache.http.*, + com.sun.*;resolution:=optional, + com.google.common.collect; version="9.0.0", + + * + + true + resteasy-jaxrs, resteasy-jaxb-provider, scannotation, javassist, + jsr250-api + + + + + + + + org.slf4j + slf4j-simple + 1.5.10 + + + org.apache.felix + org.apache.felix.scr.annotations + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + junit + junit + 4.4 + jar + compile + + + javax.servlet + servlet-api + 2.4 + jar + compile + + + org.mockito + mockito-all + 1.8.5 + jar + test + + + org.jboss.resteasy + jaxrs-api + 2.2.3.GA + + + org.jboss.resteasy + resteasy-jaxrs + 2.2.3.GA + + + org.jboss.resteasy + resteasy-jaxb-provider + 2.2.3.GA + + + + + jboss + http://repository.jboss.org/nexus/content/groups/public/ + + + diff --git a/extensions/jaxrs/src/main/java/uk/co/tfd/sm/api/jaxrs/JaxRestService.java b/extensions/jaxrs/src/main/java/uk/co/tfd/sm/api/jaxrs/JaxRestService.java new file mode 100644 index 00000000..390ac178 --- /dev/null +++ b/extensions/jaxrs/src/main/java/uk/co/tfd/sm/api/jaxrs/JaxRestService.java @@ -0,0 +1,10 @@ +package uk.co.tfd.sm.api.jaxrs; + +/** + * A marker interface for JAX-RS services. 
+ * @author ieb + * + */ +public interface JaxRestService { + +} diff --git a/extensions/jaxrs/src/main/java/uk/co/tfd/sm/jaxrs/ResteasyServlet.java b/extensions/jaxrs/src/main/java/uk/co/tfd/sm/jaxrs/ResteasyServlet.java new file mode 100644 index 00000000..0376191a --- /dev/null +++ b/extensions/jaxrs/src/main/java/uk/co/tfd/sm/jaxrs/ResteasyServlet.java @@ -0,0 +1,186 @@ +package uk.co.tfd.sm.jaxrs; + +import java.io.IOException; +import java.util.Map; +import java.util.Set; + +import javax.servlet.Servlet; +import javax.servlet.ServletConfig; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.HttpHeaders; + +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Deactivate; +import org.apache.felix.scr.annotations.Properties; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.ReferenceCardinality; +import org.apache.felix.scr.annotations.ReferencePolicy; +import org.apache.felix.scr.annotations.ReferenceStrategy; +import org.apache.felix.scr.annotations.References; +import org.apache.felix.scr.annotations.Service; +import org.jboss.resteasy.core.Dispatcher; +import org.jboss.resteasy.core.SynchronousDispatcher; +import org.jboss.resteasy.plugins.server.servlet.HttpRequestFactory; +import org.jboss.resteasy.plugins.server.servlet.HttpResponseFactory; +import org.jboss.resteasy.plugins.server.servlet.HttpServletInputMessage; +import org.jboss.resteasy.plugins.server.servlet.HttpServletResponseWrapper; +import org.jboss.resteasy.plugins.server.servlet.ServletBootstrap; +import org.jboss.resteasy.plugins.server.servlet.ServletContainerDispatcher; +import org.jboss.resteasy.specimpl.UriInfoImpl; +import org.jboss.resteasy.spi.HttpRequest; 
+import org.jboss.resteasy.spi.HttpResponse; +import org.jboss.resteasy.spi.Registry; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.jaxrs.JaxRestService; + +import com.google.common.collect.Sets; + +@Component(immediate = true, metatype = true) +@Service(value = Servlet.class) +@Properties(value = { @Property(name = "alias", value = "/") }) +@References(value = { @Reference(name = "services", referenceInterface=JaxRestService.class, cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, policy = ReferencePolicy.DYNAMIC, strategy = ReferenceStrategy.EVENT, bind = "bindService", unbind = "unbindService") }) +public class ResteasyServlet extends HttpServlet implements HttpRequestFactory, + HttpResponseFactory { + + /** + * + */ + private static final long serialVersionUID = 3623498533852144726L; + private static final Logger LOGGER = LoggerFactory.getLogger(ResteasyServlet.class); + protected ServletContainerDispatcher servletContainerDispatcher; + private Set pendingServices = Sets.newHashSet(); + private Object registrationSync = new Object(); + + /** + * {@inheritDoc} + * + * @see org.jboss.resteasy.plugins.server.servlet.HttpServletDispatcher#init(javax.servlet.ServletConfig) + */ + + public Dispatcher getDispatcher() { + return servletContainerDispatcher.getDispatcher(); + } + + public Registry getRegistry() { + return servletContainerDispatcher.getDispatcher().getRegistry(); + } + + @Activate + public void activate(Map properties) { + + } + + @Deactivate + public void deactivate(Map properties ) { + + } + + public void init(ServletConfig servletConfig) throws ServletException { + synchronized (registrationSync) { + ClassLoader bundleClassloader = this.getClass().getClassLoader(); + ClassLoader contextClassloader = Thread.currentThread() + .getContextClassLoader(); + try { + Thread.currentThread().setContextClassLoader(bundleClassloader); + super.init(servletConfig); + ServletBootstrap bootstrap = new 
ServletBootstrap(servletConfig); + servletContainerDispatcher = new ServletContainerDispatcher(); + servletContainerDispatcher.init( + servletConfig.getServletContext(), bootstrap, this, + this); + servletContainerDispatcher.getDispatcher() + .getDefaultContextObjects() + .put(ServletConfig.class, servletConfig); + + } finally { + Thread.currentThread() + .setContextClassLoader(contextClassloader); + } + Registry registry = getRegistry(); + for (JaxRestService service : pendingServices) { + LOGGER.info("Registering JaxRestService {} ",service); + registry.addSingletonResource(service); + } + pendingServices.clear(); + } + + } + + @Override + public void destroy() { + synchronized (registrationSync) { + super.destroy(); + LOGGER.info("Removing all JaxRestServices "); + servletContainerDispatcher.destroy(); + servletContainerDispatcher = null; + } + } + + protected void bindService(JaxRestService service) { + synchronized (registrationSync) { + if (servletContainerDispatcher == null) { + pendingServices.add(service); + } else { + LOGGER.info("Registering JaxRestService {} ",service); + getRegistry().addSingletonResource(service); + } + } + } + + protected void unbindService(JaxRestService service) { + synchronized (registrationSync) { + if (servletContainerDispatcher == null) { + pendingServices.remove(service); + } else { + LOGGER.info("Removing JaxRestService {} ",service); + getRegistry().removeRegistrations(service.getClass()); + } + } + } + + protected void service(HttpServletRequest httpServletRequest, + HttpServletResponse httpServletResponse) throws ServletException, + IOException { + service(httpServletRequest.getMethod(), httpServletRequest, + httpServletResponse); + } + + public void service(String httpMethod, HttpServletRequest request, + HttpServletResponse response) throws IOException { + servletContainerDispatcher.service(httpMethod, request, response, true); + } + + public HttpRequest createResteasyHttpRequest(String httpMethod, + HttpServletRequest 
request, HttpHeaders headers, + UriInfoImpl uriInfo, HttpResponse theResponse, + HttpServletResponse response) { + return createHttpRequest(httpMethod, request, headers, uriInfo, + theResponse, response); + } + + public HttpResponse createResteasyHttpResponse(HttpServletResponse response) { + return createServletResponse(response); + } + + protected HttpRequest createHttpRequest(String httpMethod, + HttpServletRequest request, HttpHeaders headers, + UriInfoImpl uriInfo, HttpResponse theResponse, + HttpServletResponse response) { + return new HttpServletInputMessage(request, theResponse, headers, + uriInfo, httpMethod.toUpperCase(), + (SynchronousDispatcher) getDispatcher()); + } + + protected HttpResponse createServletResponse(HttpServletResponse response) { + return new HttpServletResponseWrapper(response, getDispatcher() + .getProviderFactory()); + } + +} diff --git a/extensions/jetty/pom.xml b/extensions/jetty/pom.xml new file mode 100644 index 00000000..d4ef411a --- /dev/null +++ b/extensions/jetty/pom.xml @@ -0,0 +1,139 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + uk.co.tfd.sm.jetty + bundle + 0.1-SNAPSHOT + Sparse Map :: Jetty Configuration + Adds configuration and setup of default servlets for Jetty. 
+ + UTF-8 + + + + + org.apache.felix + maven-bundle-plugin + true + + + + http://www.tfd.co.uk + + Ian Boston + ${project.artifactId} + sakai-nakamura,driver + + org.apache.felix.http.api;version="2.2";-split-package:=merge-first, + org.osgi.service.http;version="1.2";-split-package:=merge-first, + javax.servlet.resources;version="2.5";-split-package:=merge-first, + javax.servlet;version="2.5";-split-package:=merge-first, + javax.servlet.jsp.resources;version="2.5";-split-package:=merge-first, + javax.servlet.http;version="2.5";-split-package:=merge-first, + org.mortbay.util.ajax;version="6.1.24";-split-package:=merge-first, + org.sakaiproject.nakamura.api.servlet + + + org.apache.felix.http.jetty.internal;-split-package:=merge-first, + org.mortbay.management;-split-package:=merge-first, + uk.co.tfd.sm.jetty + + + org.apache.commons.io; version="1.4", + com.google.common.collect; version="9.0.0", + javax.management, + javax.management.loading, + javax.management.modelmbean, + javax.net.ssl;resolution:=optional, + javax.security.cert;resolution:=optional, + javax.servlet;version="2.5";resolution:=optional;-split-package:=merge-first, + javax.servlet.http;version="2.5";resolution:=optional;-split-package:=merge-first, + javax.servlet.jsp.resources;version="2.5";resolution:=optional;-split-package:=merge-first, + javax.servlet.resources;version="2.5";resolution:=optional;-split-package:=merge-first, + javax.xml.parsers;resolution:=optional, + javax.sql;resolution:=optional, + org.apache.felix.http.api;version="2.0";resolution:=optional, + org.osgi.framework;version="1.3";resolution:=optional, + org.osgi.service.http;version="1.2";resolution:=optional, + org.osgi.service.log;version="1.3";resolution:=optional, + org.osgi.util.tracker;version="1.3";resolution:=optional, + org.slf4j;resolution:=optional;-split-package:=merge-first, + org.xml.sax;resolution:=optional, + org.xml.sax.helpers;resolution:=optional, + * + + + org.apache.felix.http.jetty, + jetty-management + + 
+ + + + + + + org.apache.felix + org.apache.felix.http.jetty + 2.2.0 + + + org.apache.felix + org.osgi.core + 1.2.0 + + + org.apache.felix + org.osgi.compendium + 1.2.0 + + + org.mortbay.jetty + jetty-management + 6.1.22 + + + org.mortbay.jetty + jetty + + + provided + + + org.apache.felix + org.apache.felix.scr.annotations + + + + org.slf4j + slf4j-api + 1.5.10 + + + commons-lang + commons-lang + 2.4 + jar + compile + + + commons-io + commons-io + 1.4 + jar + compile + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + diff --git a/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraActivator.java b/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraActivator.java new file mode 100644 index 00000000..e15e402c --- /dev/null +++ b/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraActivator.java @@ -0,0 +1,25 @@ +package org.apache.felix.http.jetty.internal; + +import org.apache.felix.http.base.internal.AbstractHttpActivator; + +public class NakamuraActivator extends AbstractHttpActivator { + + private NakamuraJettyService jetty; + + protected void doStart() + throws Exception + { + super.doStart(); + this.jetty = new NakamuraJettyService(getBundleContext(), getDispatcherServlet(), getEventDispatcher(), + getHttpServiceController()); + this.jetty.start(); + } + + protected void doStop() + throws Exception + { + this.jetty.stop(); + super.doStop(); + } + +} diff --git a/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraJettyManagedService.java b/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraJettyManagedService.java new file mode 100644 index 00000000..88affcf9 --- /dev/null +++ b/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraJettyManagedService.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.felix.http.jetty.internal; + +import java.util.Dictionary; + +import org.osgi.framework.Bundle; +import org.osgi.framework.ServiceFactory; +import org.osgi.framework.ServiceRegistration; +import org.osgi.service.cm.ManagedService; + +public class NakamuraJettyManagedService implements ServiceFactory +{ + + private final NakamuraJettyService jettyService; + + NakamuraJettyManagedService(final NakamuraJettyService jettyService) + { + this.jettyService = jettyService; + } + + public Object getService(Bundle bundle, ServiceRegistration registration) + { + return new ManagedService() + { + @SuppressWarnings("rawtypes") + public void updated(Dictionary properties) + { + jettyService.updated(properties); + } + }; + } + + public void ungetService(Bundle bundle, ServiceRegistration registration, Object service) + { + // just have the reference dropped, nothing to cleanup + } + +} diff --git a/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraJettyService.java b/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraJettyService.java new file mode 100644 index 00000000..c94e896d --- /dev/null +++ b/extensions/jetty/src/main/java/org/apache/felix/http/jetty/internal/NakamuraJettyService.java @@ -0,0 +1,190 @@ +/* + 
* Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.felix.http.jetty.internal; + +import org.apache.felix.http.base.internal.DispatcherServlet; +import org.apache.felix.http.base.internal.EventDispatcher; +import org.apache.felix.http.base.internal.HttpServiceController; +import org.apache.felix.http.base.internal.logger.SystemLogger; +import org.mortbay.jetty.Server; +import org.mortbay.jetty.security.HashUserRealm; +import org.mortbay.jetty.servlet.Context; +import org.mortbay.jetty.servlet.ServletHolder; +import org.mortbay.management.MBeanContainer; +import org.mortbay.xml.XmlConfiguration; +import org.osgi.framework.BundleContext; +import org.osgi.framework.Constants; +import org.osgi.framework.ServiceRegistration; + +import java.io.InputStream; +import java.lang.management.ManagementFactory; +import java.util.Dictionary; +import java.util.Hashtable; +import java.util.Properties; + +import javax.management.MBeanServer; + +public final class NakamuraJettyService + implements Runnable +{ + /** PID for configuration of the HTTP service. 
*/ + private static final String PID = "org.apache.felix.http"; + + private final BundleContext context; + private boolean running; + private Thread thread; + private ServiceRegistration configServiceReg; + private Server server; + private DispatcherServlet dispatcher; + private EventDispatcher eventDispatcher; + private final HttpServiceController controller; + + public NakamuraJettyService(BundleContext context, DispatcherServlet dispatcher, EventDispatcher eventDispatcher, + HttpServiceController controller) + { + this.context = context; + this.dispatcher = dispatcher; + this.eventDispatcher = eventDispatcher; + this.controller = controller; + } + + public void start() + throws Exception + { + JettyLogger.init(); + + Properties props = new Properties(); + props.put(Constants.SERVICE_PID, PID); + this.configServiceReg = this.context.registerService("org.osgi.service.cm.ManagedService", + new NakamuraJettyManagedService(this), props); + + this.thread = new Thread(this, "Jetty HTTP Service"); + this.thread.start(); + } + + public void stop() + throws Exception + { + if (this.configServiceReg != null) { + this.configServiceReg.unregister(); + } + + this.running = false; + this.thread.interrupt(); + + try { + this.thread.join(3000); + } catch (InterruptedException e) { + // Do nothing + } + } + + private void publishServiceProperties() + { + Hashtable props = new Hashtable(); + this.controller.setProperties(props); + } + + @SuppressWarnings("rawtypes") + public void updated(Dictionary props) + { + if (this.running && (this.thread != null)) { + this.thread.interrupt(); + } + } + + private void startJetty() + { + try { + initializeJetty(); + } catch (Exception e) { + SystemLogger.error("Exception while initializing Jetty.", e); + } + } + + private void stopJetty() + { + if (this.server != null) + { + try + { + this.server.stop(); + this.server = null; + } + catch (Exception e) + { + SystemLogger.error("Exception while stopping Jetty.", e); + } + } + } + + private void 
initializeJetty() + throws Exception + { + StringBuffer message = new StringBuffer("Started jetty ").append(Server.getVersion()).append(" at port(s)"); + HashUserRealm realm = new HashUserRealm("OSGi HTTP Service Realm"); + this.server = new Server(); + + MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer(); + MBeanContainer mBeanContainer = new MBeanContainer(mBeanServer); + server.getContainer().addEventListener(mBeanContainer); + mBeanContainer.start(); + + this.server.addUserRealm(realm); + InputStream configStream = this.getClass().getClassLoader().getResourceAsStream("jetty.xml"); + XmlConfiguration configuration = new XmlConfiguration(configStream); + configuration.configure(server); + configStream.close(); + +// Context staticContext = new Context(this.server, "/test", false, false); +// staticContext.addServlet(new ServletHolder(staticContentServlet), "/*"); + + Context context = new Context(this.server, "/", Context.NO_SESSIONS | Context.NO_SECURITY); + context.addEventListener(eventDispatcher); +// context.getSessionHandler().addEventListener(eventDispatcher); + + SystemLogger.info("Binding Dispatcher "+this.dispatcher+" to /*"); + + context.addServlet(new ServletHolder(this.dispatcher), "/*"); + + + this.server.start(); + SystemLogger.info(message.toString()); + publishServiceProperties(); + } + + + public void run() + { + this.running = true; + Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); + + while (this.running) { + startJetty(); + + synchronized (this) { + try { + wait(); + } catch (InterruptedException e) { + // we will definitely be interrupted + } + } + + stopJetty(); + } + } +} diff --git a/extensions/jetty/src/main/java/org/mortbay/management/MBeanContainer.java b/extensions/jetty/src/main/java/org/mortbay/management/MBeanContainer.java new file mode 100644 index 00000000..f2c95707 --- /dev/null +++ b/extensions/jetty/src/main/java/org/mortbay/management/MBeanContainer.java @@ -0,0 +1,315 @@ 
+//======================================================================== +//Copyright 2005 Mort Bay Consulting Pty. Ltd. +//------------------------------------------------------------------------ +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +//http://www.apache.org/licenses/LICENSE-2.0 +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. +//======================================================================== + +package org.mortbay.management; + +import java.net.URL; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.WeakHashMap; + +import javax.management.Attribute; +import javax.management.MBeanServer; +import javax.management.ObjectInstance; +import javax.management.ObjectName; +import javax.management.loading.PrivateMLet; + +import org.mortbay.component.Container; +import org.mortbay.component.Container.Relationship; +import org.mortbay.log.Log; +import org.mortbay.log.Logger; +import org.mortbay.util.MultiMap; +import org.mortbay.util.TypeUtil; +@SuppressWarnings("all") +public class MBeanContainer implements Container.Listener +{ + private final MBeanServer _server; + private volatile int _managementPort; + private final WeakHashMap _beans = new WeakHashMap(); + private final HashMap _unique = new HashMap(); + private String _domain = null; + private MultiMap _relations = new MultiMap(); + + + public synchronized ObjectName findMBean(Object object) + { + ObjectName bean = (ObjectName)_beans.get(object); + return bean==null?null:bean; + } + + public synchronized 
Object findBean(ObjectName oname) + { + for (Iterator iter = _beans.entrySet().iterator(); iter.hasNext();) + { + Map.Entry entry = (Map.Entry) iter.next(); + ObjectName bean = (ObjectName)entry.getValue(); + if (bean.equals(oname)) + return entry.getKey(); + } + return null; + } + + public MBeanContainer(MBeanServer server) + { + this._server = server; + Logger log = Log.getLog(); + if (log!=null) + addBean(log); + } + + public MBeanServer getMBeanServer() + { + return _server; + } + + + public void setDomain (String domain) + { + _domain =domain; + } + + public String getDomain() + { + return _domain; + } + + public void setManagementPort(int port) + { + this._managementPort = port; + } + + public void start() + { + if (_managementPort > 0) + { + try + { + Log.warn("HttpAdaptor for mx4j is not secure"); + + PrivateMLet mlet = new PrivateMLet(new URL[0], Thread.currentThread().getContextClassLoader(), false); + ObjectName mletName = ObjectName.getInstance("mx4j", "name", "HttpAdaptorLoader"); + _server.registerMBean(mlet, mletName); + + ObjectName adaptorName = ObjectName.getInstance("mx4j", "name", "HttpAdaptor"); + _server.createMBean("mx4j.tools.adaptor.http.HttpAdaptor", adaptorName, mletName); + _server.setAttribute(adaptorName, new Attribute("Port", new Integer(_managementPort))); + _server.setAttribute(adaptorName, new Attribute("Host", "localhost")); + + ObjectName processorName = ObjectName.getInstance("mx4j", "name", "XSLTProcessor"); + _server.createMBean("mx4j.tools.adaptor.http.XSLTProcessor", processorName, mletName); + _server.setAttribute(adaptorName, new Attribute("ProcessorName", processorName)); + + _server.invoke(adaptorName, "start", null, null); + + Runtime.getRuntime().addShutdownHook(new ShutdownHook(mletName, adaptorName, processorName)); + } + catch (Exception e) + { + Log.warn(e); + } + } + } + + public synchronized void add(Relationship relationship) + { + ObjectName parent=(ObjectName)_beans.get(relationship.getParent()); + if 
(parent==null) + { + addBean(relationship.getParent()); + parent=(ObjectName)_beans.get(relationship.getParent()); + } + + ObjectName child=(ObjectName)_beans.get(relationship.getChild()); + if (child==null) + { + addBean(relationship.getChild()); + child=(ObjectName)_beans.get(relationship.getChild()); + } + + if (parent!=null && child!=null) + _relations.add(parent,relationship); + + + } + + public synchronized void remove(Relationship relationship) + { + ObjectName parent=(ObjectName)_beans.get(relationship.getParent()); + ObjectName child=(ObjectName)_beans.get(relationship.getChild()); + if (parent!=null && child!=null) + _relations.removeValue(parent,relationship); + } + + public synchronized void removeBean(Object obj) + { + ObjectName bean=(ObjectName)_beans.get(obj); + + if (bean!=null) + { + List r=_relations.getValues(bean); + if (r!=null && r.size()>0) + { + Log.debug("Unregister {}", r); + Iterator iter = new ArrayList(r).iterator(); + while (iter.hasNext()) + { + Relationship rel = (Relationship)iter.next(); + rel.getContainer().update(rel.getParent(),rel.getChild(),null,rel.getRelationship(),true); + } + } + + try + { + _server.unregisterMBean(bean); + Log.debug("Unregistered {}", bean); + } + catch (javax.management.InstanceNotFoundException e) + { + Log.ignore(e); + } + catch (Exception e) + { + Log.warn(e); + } + } + } + + public synchronized void addBean(Object obj) + { + try + { + if (obj == null || _beans.containsKey(obj)) + return; + + Object mbean = ObjectMBean.mbeanFor(obj); + if (mbean == null) + return; + + ObjectName oname = null; + if (mbean instanceof ObjectMBean) + { + ((ObjectMBean) mbean).setMBeanContainer(this); + oname = ((ObjectMBean)mbean).getObjectName(); + } + + //no override mbean object name, so make a generic one + if (oname == null) + { + String type=obj.getClass().getName().toLowerCase(); + int dot = type.lastIndexOf('.'); + if (dot >= 0) + type = type.substring(dot + 1); + + String name=null; + if (mbean instanceof 
ObjectMBean) + { + name = ((ObjectMBean)mbean).getObjectNameBasis(); + if (name!=null) + { + name=name.replace('\\','/'); + if (name.endsWith("/")) + name=name.substring(0,name.length()-1); + + int slash=name.lastIndexOf('/',name.length()-1); + if (slash>0) + name=name.substring(slash+1); + dot=name.lastIndexOf('.'); + if (dot>0) + name=name.substring(0,dot); + + name=name.replace(':','_').replace('*','_').replace('?','_').replace('=','_').replace(',','_').replace(' ','_'); + } + } + + String basis=(name!=null&&name.length()>1)?("type="+type+",name="+name):("type="+type); + + Integer count = (Integer) _unique.get(basis); + count = TypeUtil.newInteger(count == null ? 0 : (1 + count.intValue())); + _unique.put(basis, count); + + //if no explicit domain, create one + String domain = _domain; + if (domain==null) + domain = obj.getClass().getPackage().getName(); + + oname = ObjectName.getInstance(domain+":"+basis+",id="+count); + } + try { + _server.unregisterMBean(oname); + Log.warn("Replacing {} ", oname.getCanonicalName()); + } catch ( Exception e) { + Log.ignore(e); + } + ObjectInstance oinstance = _server.registerMBean(mbean, oname); + Log.debug("Registered {}" , oinstance.getObjectName()); + _beans.put(obj, oinstance.getObjectName()); + + } + catch (Exception e) + { + Log.warn("bean: "+obj,e); + } + } + + private class ShutdownHook extends Thread + { + private final ObjectName mletName; + private final ObjectName adaptorName; + private final ObjectName processorName; + + public ShutdownHook(ObjectName mletName, ObjectName adaptorName, ObjectName processorName) + { + this.mletName = mletName; + this.adaptorName = adaptorName; + this.processorName = processorName; + } + + public void run() + { + halt(); + unregister(processorName); + unregister(adaptorName); + unregister(mletName); + } + + private void halt() + { + try + { + _server.invoke(adaptorName, "stop", null, null); + } + catch (Exception e) + { + Log.warn(e); + } + } + + private void unregister(ObjectName 
objectName) + { + try + { + _server.unregisterMBean(objectName); + Log.debug("Unregistered " + objectName); + } + catch (Exception e) + { + Log.warn(e); + } + } + } + +} diff --git a/extensions/jetty/src/main/java/org/mortbay/management/ObjectMBean.java b/extensions/jetty/src/main/java/org/mortbay/management/ObjectMBean.java new file mode 100644 index 00000000..322298bd --- /dev/null +++ b/extensions/jetty/src/main/java/org/mortbay/management/ObjectMBean.java @@ -0,0 +1,721 @@ +//======================================================================== +//Copyright 2004 Mort Bay Consulting Pty. Ltd. +//------------------------------------------------------------------------ +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +//http://www.apache.org/licenses/LICENSE-2.0 +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. 
+//======================================================================== + +package org.mortbay.management; + +import java.lang.reflect.Array; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Locale; +import java.util.Map; +import java.util.MissingResourceException; +import java.util.ResourceBundle; +import java.util.Set; + +import javax.management.Attribute; +import javax.management.AttributeList; +import javax.management.AttributeNotFoundException; +import javax.management.DynamicMBean; +import javax.management.InvalidAttributeValueException; +import javax.management.MBeanAttributeInfo; +import javax.management.MBeanConstructorInfo; +import javax.management.MBeanException; +import javax.management.MBeanInfo; +import javax.management.MBeanNotificationInfo; +import javax.management.MBeanOperationInfo; +import javax.management.MBeanParameterInfo; +import javax.management.ObjectName; +import javax.management.ReflectionException; +import javax.management.modelmbean.ModelMBean; + +import org.mortbay.log.Log; +import org.mortbay.util.LazyList; +import org.mortbay.util.Loader; +import org.mortbay.util.TypeUtil; + +/* ------------------------------------------------------------ */ +/** ObjectMBean. + * A dynamic MBean that can wrap an arbitary Object instance. + * the attributes and methods exposed by this bean are controlled by + * the merge of property bundles discovered by names related to all + * superclasses and all superinterfaces. + * + * Attributes and methods exported may be "Object" and must exist on the + * wrapped object, or "MBean" and must exist on a subclass of OBjectMBean + * or "MObject" which exists on the wrapped object, but whose values are + * converted to MBean object names. 
+ * + */ +@SuppressWarnings("all") +public class ObjectMBean implements DynamicMBean +{ + private static Class[] OBJ_ARG = new Class[]{Object.class}; + + protected Object _managed; + private MBeanInfo _info; + private Map _getters=new HashMap(); + private Map _setters=new HashMap(); + private Map _methods=new HashMap(); + private Set _convert=new HashSet(); + private ClassLoader _loader; + private MBeanContainer _mbeanContainer; + + private static String OBJECT_NAME_CLASS = ObjectName.class.getName(); + private static String OBJECT_NAME_ARRAY_CLASS = ObjectName[].class.getName(); + + /* ------------------------------------------------------------ */ + /** + * Create MBean for Object. Attempts to create an MBean for the object by searching the package + * and class name space. For example an object of the type + * + *
+     * <pre>
+     * class com.acme.MyClass extends com.acme.util.BaseClass implements com.acme.Iface
+     * </pre>
+     *
+     * Then this method would look for the following classes:
+     * <ul>
+     * <li>com.acme.management.MyClassMBean
+     * <li>com.acme.util.management.BaseClassMBean
+     * <li>org.mortbay.management.ObjectMBean
+     * </ul>
    + * + * @param o The object + * @return A new instance of an MBean for the object or null. + */ + public static Object mbeanFor(Object o) + { + try + { + Class oClass = o.getClass(); + Object mbean = null; + + while (mbean == null && oClass != null) + { + String pName = oClass.getPackage().getName(); + String cName = oClass.getName().substring(pName.length() + 1); + String mName = pName + ".management." + cName + "MBean"; + + + try + { + Class mClass = (Object.class.equals(oClass))?oClass=ObjectMBean.class:Loader.loadClass(oClass,mName,true); + if (Log.isDebugEnabled()) + Log.debug("mbeanFor " + o + " mClass=" + mClass); + + try + { + Constructor constructor = mClass.getConstructor(OBJ_ARG); + mbean=constructor.newInstance(new Object[]{o}); + } + catch(Exception e) + { + Log.ignore(e); + if (ModelMBean.class.isAssignableFrom(mClass)) + { + mbean=mClass.newInstance(); + ((ModelMBean)mbean).setManagedResource(o, "objectReference"); + } + } + + if (Log.isDebugEnabled()) + Log.debug("mbeanFor " + o + " is " + mbean); + return mbean; + } + catch (ClassNotFoundException e) + { + if (e.toString().contains("MBean")) + Log.ignore(e); + else + Log.warn(e); + } + catch (Error e) + { + Log.warn(e); + mbean = null; + } + catch (Exception e) + { + Log.warn(e); + mbean = null; + } + + oClass = oClass.getSuperclass(); + } + } + catch (Exception e) + { + Log.ignore(e); + } + return null; + } + + + public ObjectMBean(Object managedObject) + { + _managed = managedObject; + _loader = Thread.currentThread().getContextClassLoader(); + } + + public Object getManagedObject() + { + return _managed; + } + + public ObjectName getObjectName() + { + return null; + } + + public String getObjectNameBasis() + { + return null; + } + + protected void setMBeanContainer(MBeanContainer container) + { + this._mbeanContainer = container; + } + + public MBeanContainer getMBeanContainer () + { + return this._mbeanContainer; + } + + + public MBeanInfo getMBeanInfo() + { + try + { + if (_info==null) + { 
+ // Start with blank lazy lists attributes etc. + String desc=null; + Object attributes=null; + Object constructors=null; + Object operations=null; + Object notifications=null; + + // Find list of classes that can influence the mbean + Class o_class=_managed.getClass(); + Object influences = findInfluences(null, _managed.getClass()); + + // Set to record defined items + Set defined=new HashSet(); + + // For each influence + for (int i=0;i0) + { + // define an operation + if (!defined.contains(key) && key.indexOf('[')<0) + { + defined.add(key); + operations=LazyList.add(operations,defineOperation(key, value, bundle)); + } + } + else + { + // define an attribute + if (!defined.contains(key)) + { + defined.add(key); + attributes=LazyList.add(attributes,defineAttribute(key, value)); + } + } + } + + } + catch(MissingResourceException e) + { + Log.ignore(e); + } + } + + _info = new MBeanInfo(o_class.getName(), + desc, + (MBeanAttributeInfo[])LazyList.toArray(attributes, MBeanAttributeInfo.class), + (MBeanConstructorInfo[])LazyList.toArray(constructors, MBeanConstructorInfo.class), + (MBeanOperationInfo[])LazyList.toArray(operations, MBeanOperationInfo.class), + (MBeanNotificationInfo[])LazyList.toArray(notifications, MBeanNotificationInfo.class)); + } + } + catch(RuntimeException e) + { + Log.warn(e); + throw e; + } + return _info; + } + + + /* ------------------------------------------------------------ */ + public Object getAttribute(String name) throws AttributeNotFoundException, MBeanException, ReflectionException + { + Method getter = (Method) _getters.get(name); + if (getter == null) + throw new AttributeNotFoundException(name); + try + { + Object o = _managed; + if (getter.getDeclaringClass().isInstance(this)) + o = this; // mbean method + + // get the attribute + Object r=getter.invoke(o, (java.lang.Object[]) null); + + // convert to ObjectName if need be. 
+ if (r!=null && _convert.contains(name)) + { + if (r.getClass().isArray()) + { + ObjectName[] on = new ObjectName[Array.getLength(r)]; + for (int i=0;i 0 ? "," : "") + signature[i]; + methodKey += ")"; + + ClassLoader old_loader=Thread.currentThread().getContextClassLoader(); + try + { + Thread.currentThread().setContextClassLoader(_loader); + Method method = (Method) _methods.get(methodKey); + if (method == null) + throw new NoSuchMethodException(methodKey); + + Object o = _managed; + if (method.getDeclaringClass().isInstance(this)) + o = this; + return method.invoke(o, params); + } + catch (NoSuchMethodException e) + { + Log.warn(Log.EXCEPTION, e); + throw new ReflectionException(e); + } + catch (IllegalAccessException e) + { + Log.warn(Log.EXCEPTION, e); + throw new MBeanException(e); + } + catch (InvocationTargetException e) + { + Log.warn(Log.EXCEPTION, e); + throw new ReflectionException((Exception) e.getTargetException()); + } + finally + { + Thread.currentThread().setContextClassLoader(old_loader); + } + } + + private static Object findInfluences(Object influences, Class aClass) + { + if (aClass!=null) + { + // This class is an influence + influences=LazyList.add(influences,aClass); + + // So are the super classes + influences=findInfluences(influences,aClass.getSuperclass()); + + // So are the interfaces + Class[] ifs = aClass.getInterfaces(); + for (int i=0;ifs!=null && i + *
+     * <ul>
+     * <li>"Object" The field/method is on the managed object.
+     * <li>"MBean" The field/method is on the mbean proxy object
+     * <li>"MObject" The field/method is on the managed object and value should be converted to MBean reference
  • "MMBean" The field/method is on the mbean proxy object and value should be converted to MBean reference + * + * the access is either "RW" or "RO". + */ + public MBeanAttributeInfo defineAttribute(String name, String metaData) + { + String description = ""; + boolean writable = true; + boolean onMBean = false; + boolean convert = false; + + if (metaData!= null) + { + String[] tokens = metaData.split(":", 3); + for (int t=0;t0?",":"(")+args[i]; + } + signature+=(i>0?")":"()"); + + // Build param infos + for (i = 0; i < args.length; i++) + { + String param_desc = bundle.getString(signature + "[" + i + "]"); + parts=param_desc.split(" *: *",2); + if (Log.isDebugEnabled()) + Log.debug(parts[0]+": "+parts[1]); + pInfo[i] = new MBeanParameterInfo(parts[0].trim(), args[i], parts[1].trim()); + } + + // build the operation info + Method method = oClass.getMethod(method_name, types); + Class returnClass = method.getReturnType(); + _methods.put(signature, method); + if (convert) + _convert.add(signature); + + return new MBeanOperationInfo(method_name, description, pInfo, returnClass.isPrimitive() ? TypeUtil.toName(returnClass) : (returnClass.getName()), impact); + } + catch (Exception e) + { + Log.warn("Operation '"+signature+"'", e); + throw new IllegalArgumentException(e.toString()); + } + + } + +} diff --git a/extensions/jetty/src/main/java/org/sakaiproject/nakamura/api/servlet/HttpOnlyCookie.java b/extensions/jetty/src/main/java/org/sakaiproject/nakamura/api/servlet/HttpOnlyCookie.java new file mode 100644 index 00000000..ee30a053 --- /dev/null +++ b/extensions/jetty/src/main/java/org/sakaiproject/nakamura/api/servlet/HttpOnlyCookie.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.servlet; + + +/** + * + */ +public class HttpOnlyCookie extends org.mortbay.jetty.HttpOnlyCookie { + + /** + * @param name + * @param value + */ + public HttpOnlyCookie(String name, String value) { + super(name, value); + } + +} diff --git a/extensions/jetty/src/main/java/uk/co/tfd/sm/jetty/GzipFilter.java b/extensions/jetty/src/main/java/uk/co/tfd/sm/jetty/GzipFilter.java new file mode 100644 index 00000000..610bd3c8 --- /dev/null +++ b/extensions/jetty/src/main/java/uk/co/tfd/sm/jetty/GzipFilter.java @@ -0,0 +1,616 @@ +//======================================================================== +//Copyright 2007 Mort Bay Consulting Pty. Ltd. +//------------------------------------------------------------------------ +//Licensed under the Apache License, Version 2.0 (the "License"); +//you may not use this file except in compliance with the License. +//You may obtain a copy of the License at +//http://www.apache.org/licenses/LICENSE-2.0 +//Unless required by applicable law or agreed to in writing, software +//distributed under the License is distributed on an "AS IS" BASIS, +//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +//See the License for the specific language governing permissions and +//limitations under the License. 
+//======================================================================== +package uk.co.tfd.sm.jetty; + +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.io.UnsupportedEncodingException; +import java.util.HashSet; +import java.util.Set; +import java.util.StringTokenizer; +import java.util.zip.GZIPOutputStream; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletOutputStream; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpServletResponseWrapper; + +import org.mortbay.servlet.UserAgentFilter; +import org.mortbay.util.ByteArrayOutputStream2; +import org.mortbay.util.StringUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/* ------------------------------------------------------------ */ +/** GZIP Filter + * This filter will gzip the content of a response iff:
+ * <ul>
+ * <li>The filter is mapped to a matching path</li>
+ * <li>The response status code is &gt;=200 and &lt;300</li>
+ * <li>The content length is unknown or more than the minGzipSize initParameter or the minGzipSize is 0 (default)</li>
+ * <li>The content-type is in the comma separated list of mimeTypes set in the mimeTypes initParameter or
+ * if no mimeTypes are defined the content-type is not "application/gzip"</li>
+ * <li>No content-encoding is specified by the resource</li>
+ * </ul>
+ *
+ * <p>
+ * Compressing the content can greatly improve the network bandwidth usage, but at a cost of memory and
+ * CPU cycles. If this filter is mapped for static content, then use of efficient direct NIO may be
+ * prevented, thus use of the gzip mechanism of the {@link org.mortbay.jetty.servlet.DefaultServlet} is
+ * advised instead.
+ * </p>
+ * <p>
+ * This filter extends {@link UserAgentFilter} and if the initParameter excludedAgents
+ * is set to a comma separated list of user agents, then these agents will be excluded from gzip content.
+ * </p>
    + * + * @author gregw + * + */ +public class GzipFilter extends UserAgentFilter +{ + private static final Logger LOGGER = LoggerFactory.getLogger(GzipFilter.class); + protected Set _mimeTypes; + protected int _bufferSize=8192; + protected int _minGzipSize=0; + protected Set _excluded; + + public void init(FilterConfig filterConfig) throws ServletException + { + super.init(filterConfig); + + String tmp=filterConfig.getInitParameter("bufferSize"); + if (tmp!=null) + _bufferSize=Integer.parseInt(tmp); + + tmp=filterConfig.getInitParameter("minGzipSize"); + if (tmp!=null) + _minGzipSize=Integer.parseInt(tmp); + + tmp=filterConfig.getInitParameter("mimeTypes"); + if (tmp!=null) + { + _mimeTypes=new HashSet(); + StringTokenizer tok = new StringTokenizer(tmp,",",false); + while (tok.hasMoreTokens()) + _mimeTypes.add(tok.nextToken()); + } + + tmp=filterConfig.getInitParameter("excludedAgents"); + if (tmp!=null) + { + _excluded=new HashSet(); + StringTokenizer tok = new StringTokenizer(tmp,",",false); + while (tok.hasMoreTokens()) + _excluded.add(tok.nextToken()); + } + } + + public void destroy() + { + } + + public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) + throws IOException, ServletException + { + HttpServletRequest request=(HttpServletRequest)req; + HttpServletResponse response=(HttpServletResponse)res; + + String ae = request.getHeader("accept-encoding"); + Boolean gzip=(Boolean)request.getAttribute("GzipFilter"); + if (ae != null && ae.indexOf("gzip")>=0 && !response.containsHeader("Content-Encoding") && + (gzip==null || gzip.booleanValue()) && !"HEAD".equalsIgnoreCase(request.getMethod())) + { + if (_excluded!=null) + { + String ua=getUserAgent(request); + if (_excluded.contains(ua)) + { + super.doFilter(request,response,chain); + return; + } + } + + GZIPResponseWrapper wrappedResponse=newGZIPResponseWrapper(request,response); + + boolean exceptional=true; + try + { + super.doFilter(request,wrappedResponse,chain); + 
exceptional=false; + } + catch(RuntimeException e) + { + request.setAttribute("GzipFilter",Boolean.FALSE); + if (!response.isCommitted()) + response.reset(); + throw e; + } + finally + { + if (exceptional && !response.isCommitted()) + { + wrappedResponse.resetBuffer(); + wrappedResponse.noGzip(true, "Exception"); + } + else + wrappedResponse.finish(); + } + } + else + { + super.doFilter(request,response,chain); + } + } + + protected GZIPResponseWrapper newGZIPResponseWrapper(HttpServletRequest request, HttpServletResponse response) + { + return new GZIPResponseWrapper(request,response); + } + + /* + * Allows derived implementations to replace PrintWriter implementation + */ + protected PrintWriter newWriter(OutputStream out,String encoding) throws UnsupportedEncodingException + { + return encoding==null?new PrintWriter(out):new PrintWriter(new OutputStreamWriter(out,encoding)); + } + + public class GZIPResponseWrapper extends HttpServletResponseWrapper + { + HttpServletRequest _request; + boolean _noGzip; + boolean _hardNoGzip; + PrintWriter _writer; + GzipStream _gzStream; + long _contentLength=-1; + + public GZIPResponseWrapper(HttpServletRequest request, HttpServletResponse response) + { + super(response); + _request=request; + } + + public void setContentType(String ct) + { + super.setContentType(ct); + + if (ct!=null) + { + int colon=ct.indexOf(";"); + if (colon>0) + ct=ct.substring(0,colon); + } + + if ((_gzStream==null || _gzStream._out==null) && + (_mimeTypes==null && "application/gzip".equalsIgnoreCase(ct) || + _mimeTypes!=null && (ct==null||!_mimeTypes.contains(StringUtil.asciiToLowerCase(ct))))) + { + noGzip(true, "Wrong Mime Type, "+ct); + } + } + + + public void setStatus(int sc, String sm) + { + super.setStatus(sc,sm); + if (sc<200||sc>=300) + noGzip(false, "Status Code "+sc); + else if ( !isCommitted() ) + doGzip(); + + } + + public void setStatus(int sc) + { + super.setStatus(sc); + if (sc<200||sc>=300) + noGzip(false, "Status Code "+sc ); + else + 
doGzip(); + } + + public void setContentLength(int length) + { + _contentLength=length; + if (_gzStream!=null) + _gzStream.setContentLength(length); + } + + public void addHeader(String name, String value) + { + if ("content-length".equalsIgnoreCase(name)) + { + _contentLength=Long.parseLong(value); + if (_gzStream!=null) + _gzStream.setContentLength(_contentLength); + } + else if ("content-type".equalsIgnoreCase(name)) + { + setContentType(value); + } + else if ("content-encoding".equalsIgnoreCase(name)) + { + super.addHeader(name,value); + if (!isCommitted()) + { + noGzip(true, "Header "+name+" "+value); + } + } + else + super.addHeader(name,value); + } + + public void setHeader(String name, String value) + { + if ("content-length".equalsIgnoreCase(name)) + { + _contentLength=Long.parseLong(value); + if (_gzStream!=null) + _gzStream.setContentLength(_contentLength); + } + else if ("content-type".equalsIgnoreCase(name)) + { + setContentType(value); + } + else if ("content-encoding".equalsIgnoreCase(name)) + { + super.setHeader(name,value); + if (!isCommitted()) + { + noGzip(true, "Header "+name+" "+value); + } + } + else + super.setHeader(name,value); + } + + public void setIntHeader(String name, int value) + { + if ("content-length".equalsIgnoreCase(name)) + { + _contentLength=value; + if (_gzStream!=null) + _gzStream.setContentLength(_contentLength); + } + else + super.setIntHeader(name,value); + } + + public void flushBuffer() throws IOException + { + if (_writer!=null) + _writer.flush(); + if (_gzStream!=null) + _gzStream.finish(); + else + getResponse().flushBuffer(); + } + + public void reset() + { + super.reset(); + if (_gzStream!=null) + _gzStream.resetBuffer(); + _writer=null; + _gzStream=null; + _noGzip=false; + _hardNoGzip=false; + _contentLength=-1; + } + + public void resetBuffer() + { + super.resetBuffer(); + if (_gzStream!=null) + _gzStream.resetBuffer(); + _writer=null; + _gzStream=null; + } + + public void sendError(int sc, String msg) throws 
IOException + { + resetBuffer(); + super.sendError(sc,msg); + } + + public void sendError(int sc) throws IOException + { + resetBuffer(); + super.sendError(sc); + } + + public void sendRedirect(String location) throws IOException + { + resetBuffer(); + super.sendRedirect(location); + } + + public ServletOutputStream getOutputStream() throws IOException + { + if (_gzStream==null) + { + if (getResponse().isCommitted() || _noGzip) + return getResponse().getOutputStream(); + + _gzStream=newGzipStream(_request,(HttpServletResponse)getResponse(),_contentLength,_bufferSize,_minGzipSize); + } + else if (_writer!=null) + throw new IllegalStateException("getWriter() called"); + + return _gzStream; + } + + public PrintWriter getWriter() throws IOException + { + if (_writer==null) + { + if (_gzStream!=null) + throw new IllegalStateException("getOutputStream() called"); + + if (getResponse().isCommitted() || _noGzip) + return getResponse().getWriter(); + + _gzStream=newGzipStream(_request,(HttpServletResponse)getResponse(),_contentLength,_bufferSize,_minGzipSize); + _writer=newWriter(_gzStream,getCharacterEncoding()); + } + return _writer; + } + + void noGzip(boolean hard, String message) + { + LOGGER.debug("no gzip hard :{} {}",hard, message); + _noGzip=true; + if ( hard ) { + _hardNoGzip = true; + } + if (_gzStream!=null) + { + try + { + _gzStream.doNotGzip(); + } + catch (IOException e) + { + throw new IllegalStateException(); + } + } + } + + void doGzip() { + if (!_hardNoGzip && !isCommitted()) { + LOGGER.debug("Setting gzip"); + _noGzip = false; + if (_gzStream != null) { + try { + _gzStream.doGzip(); + } catch (IOException e) { + throw new IllegalStateException(); + } + } + } + } + + void finish() throws IOException + { + if (_writer!=null && !_gzStream._closed) + _writer.flush(); + if (_gzStream!=null) + _gzStream.finish(); + } + + protected GzipStream newGzipStream(HttpServletRequest request,HttpServletResponse response,long contentLength,int bufferSize, int 
minGzipSize) throws IOException + { + return new GzipStream(request,response,contentLength,bufferSize,minGzipSize); + } + } + + + public static class GzipStream extends ServletOutputStream + { + protected HttpServletRequest _request; + protected HttpServletResponse _response; + protected OutputStream _out; + protected ByteArrayOutputStream2 _bOut; + protected GZIPOutputStream _gzOut; + protected boolean _closed; + protected int _bufferSize; + protected int _minGzipSize; + protected long _contentLength; + + public GzipStream(HttpServletRequest request,HttpServletResponse response,long contentLength,int bufferSize, int minGzipSize) throws IOException + { + _request=request; + _response=response; + _contentLength=contentLength; + _bufferSize=bufferSize; + _minGzipSize=minGzipSize; + // KERN-1845 : java.lang.IllegalStateException: _gzOut != null + // if (minGzipSize==0) + // doGzip(); + } + + public void resetBuffer() + { + _closed=false; + _out=null; + _bOut=null; + if (_gzOut!=null && !_response.isCommitted()) + _response.setHeader("Content-Encoding",null); + _gzOut=null; + } + + public void setContentLength(long length) + { + _contentLength=length; + } + + public void flush() throws IOException + { + if (_out==null || _bOut!=null) + { + if (_contentLength>0 && _contentLength<_minGzipSize) + doNotGzip(); + else + doGzip(); + } + + _out.flush(); + } + + public void close() throws IOException + { + if (_request.getAttribute("javax.servlet.include.request_uri")!=null) + flush(); + else + { + if (_bOut!=null) + { + if (_contentLength<0) + _contentLength=_bOut.getCount(); + if (_contentLength<_minGzipSize) + doNotGzip(); + else + doGzip(); + } + else if (_out==null) + { + doNotGzip(); + } + + if (_gzOut!=null) + _gzOut.close(); + else + _out.close(); + _closed=true; + } + } + + public void finish() throws IOException + { + if (!_closed) + { + if (_out==null || _bOut!=null) + { + if (_contentLength>0 && _contentLength<_minGzipSize) + doNotGzip(); + else + doGzip(); + } + + 
if (_gzOut!=null && !_closed) + { + _closed=true; + _gzOut.close(); + } + } + } + + public void write(int b) throws IOException + { + checkOut(1); + _out.write(b); + } + + public void write(byte b[]) throws IOException + { + checkOut(b.length); + _out.write(b); + } + + public void write(byte b[], int off, int len) throws IOException + { + checkOut(len); + _out.write(b,off,len); + } + + protected boolean setContentEncodingGzip() + { + LOGGER.debug("Encoding as GZip content"); + _response.setHeader("Content-Encoding", "gzip"); + return _response.containsHeader("Content-Encoding"); + } + + public void doGzip() throws IOException + { + if (_gzOut==null) + { + if (_response.isCommitted()) + throw new IllegalStateException(); + + if (setContentEncodingGzip()) + { + _out=_gzOut=new GZIPOutputStream(_response.getOutputStream(),_bufferSize); + + if (_bOut!=null) + { + _out.write(_bOut.getBuf(),0,_bOut.getCount()); + _bOut=null; + } + } + else + doNotGzip(); + } + } + + public void doNotGzip() throws IOException + { + if (_gzOut!=null) + throw new IllegalStateException("_gzOut != null"); + if (_out==null || _bOut!=null ) + { + _out=_response.getOutputStream(); + if (_contentLength>=0) + { + if(_contentLength=0 && _contentLength<_minGzipSize)) + doNotGzip(); + else if (length>_minGzipSize) + doGzip(); + else + _out=_bOut=new ByteArrayOutputStream2(_bufferSize); + } + else if (_bOut!=null) + { + if (_response.isCommitted() || (_contentLength>=0 && _contentLength<_minGzipSize)) + doNotGzip(); + else if (length>=(_bOut.getBuf().length-_bOut.getCount())) + doGzip(); + } + } + } +} diff --git a/extensions/jetty/src/main/java/uk/co/tfd/sm/jetty/OSGiGZipFilter.java b/extensions/jetty/src/main/java/uk/co/tfd/sm/jetty/OSGiGZipFilter.java new file mode 100644 index 00000000..c79650d8 --- /dev/null +++ b/extensions/jetty/src/main/java/uk/co/tfd/sm/jetty/OSGiGZipFilter.java @@ -0,0 +1,65 @@ +package uk.co.tfd.sm.jetty; + +import org.apache.felix.http.api.ExtHttpService; +import 
org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Deactivate; +import org.apache.felix.scr.annotations.Properties; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; + +import java.io.IOException; +import java.util.Dictionary; +import java.util.Hashtable; +import java.util.Map; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; + +/** + * This class that operates as a managed service. + */ +@Component(enabled=false, immediate=true, metatype=true) +@Service(value=Filter.class) +@Properties(value={ + @Property(name="bufferSize", intValue=8192), + @Property(name="minGzipSize", intValue=8192), + @Property(name="mimeTypes", value="text/html,text/plain,text/css,text/javascript,text/xml,application/xml,application/xhtml+xml,application/rss+xml,application/javascript,application/x-javascript,application/json"), + @Property(name="excludedAgents", value="") + }) +public class OSGiGZipFilter extends GzipFilter { + + private static final String DEFAULT_USER_AGENT = "(?:Mozilla[^\\(]*\\(compatible;\\s*+([^;]*);.*)|(?:.*?([^\\s]+/[^\\s]+).*)"; + + @SuppressWarnings("unused") +@Property(value=DEFAULT_USER_AGENT) + private static final String PROP_USER_AGENT = "userAgent"; + + @Reference + protected ExtHttpService extHttpService; + + @SuppressWarnings("rawtypes") + @Activate + public void activate(Map properties) throws ServletException { + Hashtable props = new Hashtable(); + props.putAll(properties); + extHttpService.registerFilter(this, ".*", (Dictionary) properties, 100, null); + + } + + @Override + public void doFilter(ServletRequest arg0, ServletResponse arg1, FilterChain arg2) + throws IOException, ServletException { + super.doFilter(arg0, arg1, arg2); + } + 
+ @Deactivate + public void deactivate(Map properties) { + extHttpService.unregisterFilter(this); + } + +} diff --git a/extensions/jetty/src/main/resources/META-INF/LICENSE b/extensions/jetty/src/main/resources/META-INF/LICENSE new file mode 100644 index 00000000..75b52484 --- /dev/null +++ b/extensions/jetty/src/main/resources/META-INF/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/extensions/jetty/src/main/resources/META-INF/MANIFEST.MF b/extensions/jetty/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 00000000..e1e0aeda --- /dev/null +++ b/extensions/jetty/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,45 @@ +Manifest-Version: 1.0 +Export-Package: org.apache.felix.http.api;uses:="javax.servlet,org.osg + i.service.http";version="2.0.4",org.osgi.service.http;uses:="javax.se + rvlet.http,javax.servlet";version="1.2",javax.servlet.resources;versi + on="2.5",javax.servlet;version="2.5",javax.servlet.jsp.resources;vers + ion="2.5",javax.servlet.http;uses:="javax.servlet";version="2.5",org. + sakaiproject.nakamura.api.servlet;version="0.10",org.mortbay.util.aja + x;version="6.1.22" +Built-By: ieb +Tool: Bnd-0.0.357 +Bundle-Name: Nakamura Http Jetty +Created-By: Hand +Bundle-Vendor: The Sakai Software Foundation +DynamicImport-Package: org.osgi.service.cm;version=1.2 +Build-Jdk: 1.6.0_13 +Bundle-Version: 2.2.0 +Bundle-Activator: org.apache.felix.http.jetty.internal.NakamuraActivat + or +Service-Component: OSGI-INF/serviceComponents.xml +Bundle-ManifestVersion: 2 +Bundle-License: http://www.apache.org/licenses/LICENSE-2.0.txt +Bundle-Description: This bundle is based on the Apache Felix Http Jett + y bundle with some small modifications for Nakamura. Apache Felix is + an OSGi implementation. 
+Bundle-DocURL: http://www.sakaiproject.org/ +Bundle-SymbolicName: org.apache.felix.http.jetty +Import-Package: javax.management, + javax.management.loading, + javax.management.modelmbean, + javax.net.ssl;resolution:=optional, + javax.security.cert;resolution:=optional, + javax.servlet;version="2.5";resolution:=optional, + javax.servlet.http;version="2.5";resolution:=optional, + javax.servlet.jsp.resources;version="2.5";resolution:=optional, + javax.servlet.resources;version="2.5";resolution:=optional, + javax.xml.parsers;resolution:=optional, + org.apache.felix.http.api;version="2.0";resolution:=optional, + org.osgi.framework;version="1.3";resolution:=optional, + org.osgi.service.http;version="1.2";resolution:=optional, + org.osgi.service.log;version="1.3";resolution:=optional, + org.osgi.util.tracker;version="1.3";resolution:=optional, + org.slf4j;resolution:=optional, + org.xml.sax;resolution:=optional, + org.xml.sax.helpers;resolution:=optional + diff --git a/extensions/jetty/src/main/resources/META-INF/NOTICE b/extensions/jetty/src/main/resources/META-INF/NOTICE new file mode 100644 index 00000000..2d587d91 --- /dev/null +++ b/extensions/jetty/src/main/resources/META-INF/NOTICE @@ -0,0 +1,14 @@ +Sakai Nakamura +Copyright 2009 The Sakai Foundation + +This product includes software developed at +The Sakai Foundation (http://www.sakaiproject.org/). + +----------------------------------------------------------- + +This product includes software (Apache Sling, Apache Felix, Apache Shindig and many other Apache products) +The Apache Software Foundation (http://www.apache.org/). + +Binary distributions of this product contain jars developed and licensed by other third parties, identified by the +LICENSE and NOTICE files included within each jar under the META-INF directory. 
+ diff --git a/extensions/jetty/src/main/resources/OSGI-INF/metatype/metatype.xml b/extensions/jetty/src/main/resources/OSGI-INF/metatype/metatype.xml new file mode 100644 index 00000000..b1c89f47 --- /dev/null +++ b/extensions/jetty/src/main/resources/OSGI-INF/metatype/metatype.xml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + +@Property(name="alias", value="/"), +@Property(name="acceptRanges", boolValue=true), +@Property(name="dirAllowed", boolValue=true), +@Property(name="gzip", boolValue=true), +@Property(name="resourceBase", value=""), +@Property(name="relativeResourceBase", value=""), +@Property(name="aliases", boolValue=true), +@Property(name="maxCacheSize", intValue=""), +@Property(name="maxCachedFileSize", intValue=""), +@Property(name="relativeResourceBase", intValue=""), diff --git a/extensions/jetty/src/main/resources/OSGI-INF/serviceComponents.xml b/extensions/jetty/src/main/resources/OSGI-INF/serviceComponents.xml new file mode 100644 index 00000000..ae411d08 --- /dev/null +++ b/extensions/jetty/src/main/resources/OSGI-INF/serviceComponents.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/extensions/jetty/src/main/resources/jetty.xml b/extensions/jetty/src/main/resources/jetty.xml new file mode 100644 index 00000000..ce3d3598 --- /dev/null +++ b/extensions/jetty/src/main/resources/jetty.xml @@ -0,0 +1,73 @@ + + + + + + + + + + + + + + + + + + + + + 5 + 50 + 5 + 2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/extensions/memory/pom.xml b/extensions/memory/pom.xml new file mode 100644 index 00000000..605f15ff --- /dev/null +++ b/extensions/memory/pom.xml @@ -0,0 +1,115 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + uk.co.tfd.sm.memory + bundle + 0.1-SNAPSHOT + Sparse Map :: Memory Bundle + Caching Support for Sparse Map (uses Ehcache for underlying cache). 
+ + + + org.apache.felix + maven-scr-plugin + + + org.apache.felix + maven-bundle-plugin + true + + + sparse-map + org.sakaiproject.nakamura.api.memory + + + !sun.misc, + !org.jgroups.*, + !org.hibernate.cache, + com.google.common.collect; version="9.0.0", + * + + uk.co.tfd.sm.memory.* + sun.misc.* + true + ehcache,backport-util-concurrent,jsr107cache + + + + + + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + net.sf.ehcache + ehcache + 1.5.0 + + + backport-util-concurrent + backport-util-concurrent + 3.1 + + + net.sf.jsr107cache + jsr107cache + 1.0 + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + + + org.slf4j + slf4j-api + 1.5.10 + + + junit + junit + 4.4 + + + org.easymock + easymock + 2.5.2 + + + org.slf4j + slf4j-simple + 1.5.10 + + + org.apache.felix + org.apache.felix.scr.annotations + + + org.apache.felix + org.osgi.compendium + 1.2.0 + bundle + compile + + + org.apache.felix + org.osgi.core + 1.2.0 + bundle + compile + + + diff --git a/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/Cache.java b/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/Cache.java new file mode 100644 index 00000000..6f0835f1 --- /dev/null +++ b/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/Cache.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
+package org.sakaiproject.nakamura.api.memory;
+
+import java.util.List;
+
+/**
+ * A Cache managed by the cache manager.
+ */
+public interface Cache<V> {
+
+  /**
+   * Cache an object.
+   *
+   * @param key
+   *          The key with which to find the object.
+   * @param payload
+   *          The object to cache.
+   * @return
+   *          the object previously cached under this key, or null if there was none.
+   */
+  V put(String key, V payload);
+
+  /**
+   * Test for a non expired entry in the cache.
+   *
+   * @param key
+   *          The cache key.
+   * @return true if the key maps to a non-expired cache entry, false if not.
+   */
+  boolean containsKey(String key);
+
+  /**
+   * Get the non expired entry, or null if not there (or expired)
+   *
+   * @param key
+   *          The cache key.
+   * @return The payload, or null if the payload is null, the key is not found,
+   *         or the entry has expired (Note: use containsKey() to remove this
+   *         ambiguity).
+   */
+  V get(String key);
+
+  /**
+   * Clear all entries.
+   */
+  void clear();
+
+  /**
+   * Remove this entry from the cache.
+   *
+   * @param key
+   *          The cache key.
+   */
+  void remove(String key);
+
+  /**
+   * Remove the key and any child keys from the cache, this is an expensive
+   * operation.
+   *
+   * @param key
+   */
+  void removeChildren(String key);
+
+  /**
+   * @return a list of all values currently held in this cache.
+   */
+  List<V> list();
+
+}
diff --git a/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/CacheManagerService.java b/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/CacheManagerService.java
new file mode 100644
index 00000000..ac9b4a18
--- /dev/null
+++ b/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/CacheManagerService.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Sakai Foundation (SF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The SF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package org.sakaiproject.nakamura.api.memory;
+
+
+/**
+ * The Cache manager provides access to all caches in the system. Caches are
+ * scoped by CacheScope and those that are bound can be unbound.
+ */
+public interface CacheManagerService {
+
+  /**
+   * Get a cache to contain a specified type, with a defined scope. Getting a
+   * cache of the same name in the same scope will return the same cache for
+   * that scope. The thread invoking the method forms part of the scope for
+   * CacheScopes THREAD or REQUEST.
+   *
+   * @param <T> The type of the elements; must be serializable for any non thread bound cache.
+   * @param name the name of the cache.
+   * @param scope the scope of the cache.
+   * @return the cache suitable for holding the type T
+   */
+  <T> Cache<T> getCache(String name, CacheScope scope);
+
+  /**
+   * Unbind the context specified in scope.
+   *
+   * @param scope the scope to unbind.
+   */
+  void unbind(CacheScope scope);
+}
diff --git a/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/CacheScope.java b/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/CacheScope.java
new file mode 100644
index 00000000..350b0eff
--- /dev/null
+++ b/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/CacheScope.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Sakai Foundation (SF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The SF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package org.sakaiproject.nakamura.api.memory;
+
+/**
+ * Defines the scope of the cache
+ */
+public enum CacheScope {
+  /**
+   * Bind the Cache to the request scope.
+   */
+  REQUEST(),
+  /**
+   * Bind a cache to the Thread, forever.
+   * WARNING: use with extreme caution, as any classes referenced in this type
+   * of cache will keep classloaders open and result in memory leaks
+   */
+  THREAD(),
+  /**
+   * Bind the cache to the instance, one per instance.
+   */
+  INSTANCE(),
+  /**
+   * Make the cache bound to the instance, but accept cluster wide invalidations.
+ */ + CLUSTERINVALIDATED(), + /** + * Replicate the cache over the whole cluster. + */ + CLUSTERREPLICATED(); + +} diff --git a/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/ThreadBound.java b/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/ThreadBound.java new file mode 100644 index 00000000..5453ec69 --- /dev/null +++ b/extensions/memory/src/main/java/org/sakaiproject/nakamura/api/memory/ThreadBound.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package org.sakaiproject.nakamura.api.memory; + +/** + * Objects that are added to a ThreadLocalManager, that implement this method + * will have the unbind method called when the object is ejected from the thread + * local. + * + */ +public interface ThreadBound { + + /** + * Invoked when the item is unbound from the thread. 
+ */ + void unbind(); + +} diff --git a/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/CacheImpl.java b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/CacheImpl.java new file mode 100644 index 00000000..f90b13b9 --- /dev/null +++ b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/CacheImpl.java @@ -0,0 +1,176 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package uk.co.tfd.sm.memory.ehcache; + +import net.sf.ehcache.CacheManager; +import net.sf.ehcache.Element; + +import org.sakaiproject.nakamura.api.memory.Cache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; + +/** + * + */ +public class CacheImpl implements Cache { + + private static final Logger LOGGER = LoggerFactory.getLogger(CacheImpl.class); +private String cacheName; + private net.sf.ehcache.Cache cache; + private long miss; + private long hits; + private long gets; + + /** + * @param cacheManager + * @param name + */ + public CacheImpl(CacheManager cacheManager, String name) { + if (name == null) { + cacheName = "default"; + } else { + cacheName = name; + } + synchronized (cacheManager) { + cache = cacheManager.getCache(cacheName); + if (cache == null) { + cacheManager.addCache(cacheName); + cache = cacheManager.getCache(cacheName); + if (cache == null) { + throw new RuntimeException("Failed to create Cache with name " + cacheName); + } + } + } + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.memory.Cache#clear() + */ + public void clear() { + cache.removeAll(); + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.memory.Cache#containsKey(java.lang.String) + */ + public boolean containsKey(String key) { + return cache.isKeyInCache(key); + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.memory.Cache#get(java.lang.String) + */ + public V get(String key) { + Element e = cache.get(key); + if (e == null) { + return stats(null); + } + return stats(e.getObjectValue()); + } + + + + @SuppressWarnings("unchecked") + private V stats(Object objectValue) { + if ( objectValue == null ) { + miss++; + } else { + hits++; + } + gets++; + if ( gets % 1000 == 0 ) { + long hp = (100*hits)/gets; + long mp = (100*miss)/gets; + LOGGER.info("{} Cache Stats hits {} ({}%), misses {} ({}%), calls {}",new Object[]{cacheName,hits,hp,miss,mp,gets}); 
+ } + return (V) objectValue; + } + +/** + * {@inherit-doc} + * + * @see org.sakaiproject.nakamura.api.memory.Cache#put(java.lang.String, java.lang.Object) + */ + @SuppressWarnings("unchecked") + public V put(String key, V payload) { + V previous = null; + if (cache.isKeyInCache(key)) { + Element e = cache.get(key); + if (e != null) { + previous = (V) e.getObjectValue(); + } + } + cache.put(new Element(key, payload)); + return previous; + } + + /** + * {@inherit-doc} + * + * @see org.sakaiproject.nakamura.api.memory.Cache#remove(java.lang.String) + */ + public void remove(String key) { + cache.remove(key); + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.memory.Cache#removeChildren(java.lang.String) + */ + public void removeChildren(String key) { + cache.remove(key); + if (!key.endsWith("/")) { + key = key + "/"; + } + List keys = cache.getKeys(); + for (Object k : keys) { + if (((String) k).startsWith(key)) { + cache.remove(k); + } + } + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.memory.Cache#list() + */ + @SuppressWarnings("unchecked") + public List list() { + List keys = cache.getKeys(); + List values = new ArrayList(); + for (String k : keys) { + Element e = cache.get(k); + if ( e != null ) { + values.add((V) e.getObjectValue()); + } + } + return values; + } + +} diff --git a/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/CacheManagerServiceImpl.java b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/CacheManagerServiceImpl.java new file mode 100644 index 00000000..e7179598 --- /dev/null +++ b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/CacheManagerServiceImpl.java @@ -0,0 +1,301 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.memory.ehcache; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.lang.management.ManagementFactory; +import java.lang.ref.WeakReference; +import java.util.HashMap; +import java.util.Map; + +import javax.management.MBeanServer; + +import net.sf.ehcache.CacheManager; +import net.sf.ehcache.management.ManagementService; + +import org.apache.commons.io.IOUtils; +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Deactivate; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.memory.Cache; +import org.sakaiproject.nakamura.api.memory.CacheManagerService; +import org.sakaiproject.nakamura.api.memory.CacheScope; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The CacheManagerServiceImpl + */ +@Component(immediate = true, metatype=true) +@Service(value=CacheManagerService.class) +public class CacheManagerServiceImpl implements CacheManagerService { + + public static final String DEFAULT_CACHE_CONFIG = "sling/ehcacheConfig.xml"; + + @Property( value = DEFAULT_CACHE_CONFIG) + public static final String CACHE_CONFIG = 
"cache-config"; + + @Property(value = "The Sakai Foundation") + static final String SERVICE_VENDOR = "service.vendor"; + + @Property(value = "Cache Manager Service Implementation") + static final String SERVICE_DESCRIPTION = "service.description"; + + @Property() + public static final String BIND_ADDRESS = "bind-address"; + + @Property(value="sling/ehcache/data") + public static final String CACHE_STORE = "cache-store"; + + private static final String CONFIG_PATH = "uk/co/tfd/sm/memory/ehcache/ehcacheConfig.xml"; + + private static final Logger LOGGER = LoggerFactory.getLogger(CacheManagerServiceImpl.class); + private CacheManager cacheManager; + private Map> caches = new HashMap>(); + private ThreadLocalCacheMap requestCacheMapHolder = new ThreadLocalCacheMap(); + private ThreadLocalCacheMap threadCacheMapHolder = new ThreadLocalCacheMap(); + + public CacheManagerServiceImpl() throws IOException { + } + + @Activate + public void activate(Map properties) throws FileNotFoundException, IOException { + String config = toString(properties.get(CACHE_CONFIG), DEFAULT_CACHE_CONFIG); + File configFile = new File(config); + if ( configFile.exists() ) { + LOGGER.info("Configuring Cache from {} ",configFile.getAbsolutePath()); + InputStream in = null; + try { + in = processConfig(new FileInputStream(configFile), properties); + cacheManager = new CacheManager(in); + } finally { + if ( in != null ) { + in.close(); + } + } + } else { + LOGGER.info("Configuring Cache from Classpath Default {} ", CONFIG_PATH); + InputStream in = processConfig(this.getClass().getClassLoader().getResourceAsStream(CONFIG_PATH), properties); + if ( in == null ) { + throw new IOException("Unable to open config at classpath location "+CONFIG_PATH); + } + cacheManager = new CacheManager(in); + in.close(); + } + + final WeakReference ref = new WeakReference(this); + /* + * Add in a shutdown hook, for safety + */ + Runtime.getRuntime().addShutdownHook(new Thread() { + /* + * (non-Javadoc) + * + * @see 
java.lang.Thread#run() + */ + @Override + public void run() { + try { + CacheManagerServiceImpl cm = ref.get(); + if ( cm != null ) { + cm.deactivate(); + } + } catch (Throwable t) { + LOGGER.debug(t.getMessage(),t); + } + } + }); + + // register the cache manager with JMX + MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer(); + ManagementService.registerMBeans(cacheManager, mBeanServer, true, true, + true, true); + + } + + private String toString(Object object, String defaultValue) { + if ( object == null ) { + return defaultValue; + } + return String.valueOf(object); + } + + private InputStream processConfig(InputStream in, + Map properties) throws IOException { + if ( in == null ) { + return null; + } + StringBuilder config = new StringBuilder(IOUtils.toString(in, "UTF-8")); + in.close(); + int pos = 0; + for(;;) { + int start = config.indexOf("${",pos); + if ( start < 0 ) { + break; + } + int end = config.indexOf("}", start); + if ( end < 0 ) { + throw new IllegalArgumentException( + "Config file malformed, unterminated variable " + + config.substring(start, + Math.min(start + 10, config.length()))); + } + String key = config.substring(start+2, end); + if ( properties.containsKey(key)) { + String replacement = (String) properties.get(key); + config.replace(start, end+1, replacement); + pos = start + replacement.length(); + } else { + throw new IllegalArgumentException("Missing replacement property "+key); + } + } + return new ByteArrayInputStream(config.toString().getBytes("UTF-8")); + + } + +/** + * perform a shutdown + */ + @Deactivate + public void deactivate() { + if ( cacheManager != null ) { + cacheManager.shutdown(); + cacheManager = null; + } + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.memory.CacheManagerService#getCache(java.lang.String) + */ + public Cache getCache(String name, CacheScope scope) { + switch (scope) { + case INSTANCE: + return getInstanceCache(name); + case CLUSTERINVALIDATED: + return 
getInstanceCache(name); + case CLUSTERREPLICATED: + return getInstanceCache(name); + case REQUEST: + return getRequestCache(name); + case THREAD: + return getThreadCache(name); + default: + return getInstanceCache(name); + } + } + + /** + * Generate a cache bound to the thread. + * + * @param name + * @return + */ + @SuppressWarnings("unchecked") + private Cache getThreadCache(String name) { + Map> threadCacheMap = threadCacheMapHolder.get(); + Cache threadCache = (Cache) threadCacheMap.get(name); + if (threadCache == null) { + threadCache = new MapCacheImpl(); + threadCacheMap.put(name, threadCache); + } + return threadCache; + } + + /** + * Generate a cache bound to the request + * + * @param name + * @return + */ + @SuppressWarnings("unchecked") + private Cache getRequestCache(String name) { + Map> requestCacheMap = requestCacheMapHolder.get(); + Cache requestCache = (Cache) requestCacheMap.get(name); + if (requestCache == null) { + requestCache = new MapCacheImpl(); + requestCacheMap.put(name, requestCache); + } + return requestCache; + } + + /** + * @param name + * @return + */ + @SuppressWarnings("unchecked") + private Cache getInstanceCache(String name) { + if (name == null) { + return new CacheImpl(cacheManager, null); + } else { + Cache c = (Cache) caches.get(name); + if (c == null) { + c = new CacheImpl(cacheManager, name); + caches.put(name, c); + } + return c; + } + } + + /** + * {@inheritDoc} + * + * @see org.sakaiproject.nakamura.api.memory.CacheManagerService#unbind(org.sakaiproject.nakamura.api.memory.CacheScope) + */ + public void unbind(CacheScope scope) { + switch (scope) { + case REQUEST: + unbindRequest(); + break; + case THREAD: + unbindThread(); + break; + } + } + + /** + * + */ + private void unbindThread() { + Map> threadCache = threadCacheMapHolder.get(); + for (Cache cache : threadCache.values()) { + cache.clear(); + } + threadCacheMapHolder.remove(); + } + + /** + * + */ + private void unbindRequest() { + Map> requestCache = 
requestCacheMapHolder.get(); + for (Cache cache : requestCache.values()) { + cache.clear(); + } + requestCacheMapHolder.remove(); + } + +} diff --git a/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/MapCacheImpl.java b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/MapCacheImpl.java new file mode 100644 index 00000000..efab9d48 --- /dev/null +++ b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/MapCacheImpl.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package uk.co.tfd.sm.memory.ehcache; + +import org.sakaiproject.nakamura.api.memory.Cache; +import org.sakaiproject.nakamura.api.memory.ThreadBound; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Set; + +/** + * + */ +public class MapCacheImpl extends HashMapimplements Cache { + + + /** + * + */ + private static final long serialVersionUID = -5400056532743570231L; + + /** + * {@inheritDoc} + * @see org.sakaiproject.nakamura.api.memory.Cache#containsKey(java.lang.String) + */ + public boolean containsKey(String key) { + return super.containsKey(key); + } + + /** + * {@inheritDoc} + * @see org.sakaiproject.nakamura.api.memory.Cache#get(java.lang.String) + */ + public V get(String key) { + return super.get(key); + } + + /** + * {@inheritDoc} + * @see org.sakaiproject.nakamura.api.memory.Cache#remove(java.lang.String) + */ + public void remove(String key) { + V o = super.remove(key); + if ( o instanceof ThreadBound ) { + ((ThreadBound) o).unbind(); + } + } + + + /** + * {@inheritDoc} + * @see java.util.HashMap#clear() + */ + @Override + public void clear() { + for ( String k : super.keySet() ) { + Object o = get(k); + if( o instanceof ThreadBound ) { + ((ThreadBound) o).unbind(); + } + } + super.clear(); + } + + /** + * {@inheritDoc} + * @see org.sakaiproject.nakamura.api.memory.Cache#removeChildren(java.lang.String) + */ + public void removeChildren(String key) { + super.remove(key); + if ( !key.endsWith("/") ) { + key = key + "/"; + } + Set keys = super.keySet(); + for ( String k : keys) { + if ( (k).startsWith(key) ) { + super.remove(k); + } + } + } + + /** + * {@inheritDoc} + * @see org.sakaiproject.nakamura.api.memory.Cache#list() + */ + public List list() { + return new ArrayList(super.values()); + } + + + + + +} diff --git a/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/MapDeligate.java b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/MapDeligate.java new file mode 100644 index 
00000000..f99446c6
--- /dev/null
+++ b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/MapDeligate.java
@@ -0,0 +1,68 @@
package uk.co.tfd.sm.memory.ehcache;

import org.sakaiproject.nakamura.api.memory.Cache;

import java.util.Collection;
import java.util.Map;
import java.util.Set;

/**
 * Adapts a {@link Cache} to the {@link Map} interface for lookup-style use.
 * Only clear/containsKey/get/put/remove are supported; view and bulk
 * operations throw UnsupportedOperationException. Keys are assumed to be
 * Strings at runtime. NOTE(review): the class name is a misspelling of
 * "Delegate", kept because it is referenced by other classes.
 */
public class MapDeligate<K, V> implements Map<K, V> {

  private Cache<V> cache;

  public MapDeligate(Cache<V> cache) {
    this.cache = cache;
  }

  public void clear() {
    cache.clear();
  }

  public boolean containsKey(Object key) {
    return cache.containsKey((java.lang.String) key);
  }

  public boolean containsValue(Object value) {
    throw new UnsupportedOperationException("This map is lookup only.");
  }

  public Set<Map.Entry<K, V>> entrySet() {
    throw new UnsupportedOperationException("This map is lookup only.");
  }

  public V get(Object key) {
    return cache.get((String) key);
  }

  // always reports false rather than scanning the cache; callers should not
  // rely on emptiness of this view.
  public boolean isEmpty() {
    return false;
  }

  public Set<K> keySet() {
    throw new UnsupportedOperationException("This map is lookup only.");
  }

  public V put(K key, V value) {
    return cache.put((String) key, value);
  }

  public void putAll(Map<? extends K, ? extends V> m) {
    throw new UnsupportedOperationException("This map is singly add only, use an iterator or loop.");
  }

  public int size() {
    throw new UnsupportedOperationException("This map is lookup only.");
  }

  public V remove(Object key) {
    // Cache.remove() does not report the old value, so fetch it first to
    // honour the Map contract of returning the removed entry.
    V value = cache.get((String) key);
    cache.remove((String) key);
    return value;
  }

  public Collection<V> values() {
    throw new UnsupportedOperationException("This map is lookup only.");
  }

}
diff --git a/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/StorageCacheManagerImpl.java b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/StorageCacheManagerImpl.java
new file mode 100644
index 00000000..9c99f65b
--- /dev/null
+++ b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/StorageCacheManagerImpl.java
@@ -0,0 +1,72 @@
package uk.co.tfd.sm.memory.ehcache;

import
org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Deactivate; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.CacheHolder; +import org.sakaiproject.nakamura.api.lite.StorageCacheManager; +import org.sakaiproject.nakamura.api.memory.Cache; +import org.sakaiproject.nakamura.api.memory.CacheManagerService; +import org.sakaiproject.nakamura.api.memory.CacheScope; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; + +import java.util.Map; + +@Component(immediate=true, metatype=true) +@Service(value=StorageCacheManager.class) +public class StorageCacheManagerImpl implements StorageCacheManager { + + + @Reference + private CacheManagerService cacheManagerService; + + private Map> knownCaches; + + @Activate + public void activate(Map props) { + Builder> b = ImmutableMap.builder(); + Cache accesssControlCacheCache = cacheManagerService.getCache("accessControlCache", CacheScope.CLUSTERINVALIDATED); + Cache authorizableCacheCache = cacheManagerService.getCache("authorizableCache", CacheScope.CLUSTERINVALIDATED); + Cache contentCacheCache = cacheManagerService.getCache("contentCache", CacheScope.CLUSTERINVALIDATED); + Cache queryCache = cacheManagerService.getCache("queryCache", CacheScope.CLUSTERINVALIDATED); + b.put("ac", new MapDeligate(accesssControlCacheCache)); + b.put("au", new MapDeligate(authorizableCacheCache)); + b.put("cn", new MapDeligate(contentCacheCache)); + b.put("sparseQueryCache", new MapDeligate(queryCache)); + knownCaches = b.build(); + } + + @Deactivate + public void deactivate(Map props) { + } + + + @Override + public Map getAccessControlCache() { + return getCache("ac"); + } + + @Override + public Map getAuthorizableCache() { + return getCache("au"); + } + + @Override + public Map getContentCache() { + return 
getCache("cn"); + } + + @Override + public Map getCache(String cacheName) { + if ( knownCaches.containsKey(cacheName)) { + return knownCaches.get(cacheName); + } + Cache cache = cacheManagerService.getCache(cacheName, CacheScope.CLUSTERINVALIDATED); + return new MapDeligate(cache); + } + +} diff --git a/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/ThreadLocalCacheMap.java b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/ThreadLocalCacheMap.java new file mode 100644 index 00000000..c7b0fc6f --- /dev/null +++ b/extensions/memory/src/main/java/uk/co/tfd/sm/memory/ehcache/ThreadLocalCacheMap.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.memory.ehcache; + +import org.sakaiproject.nakamura.api.memory.Cache; + +import java.util.HashMap; +import java.util.Map; + +/** + * Represents a Cache stored on the thread, used in the request thread and in other threads. + * When used in Threads there is a potential for memory leaks as perm space is not cleaned up. + * This will be caused by references to classloaders being in the Map, and keeping the classloaders + * open. 
+ */ +public class ThreadLocalCacheMap extends ThreadLocal>> { + /** + * {@inheritDoc} + * @see java.lang.ThreadLocal#initialValue() + */ + @Override + protected Map> initialValue() { + return new HashMap>(); + } +} diff --git a/extensions/memory/src/main/resources/META-INF/LICENSE b/extensions/memory/src/main/resources/META-INF/LICENSE new file mode 100644 index 00000000..75b52484 --- /dev/null +++ b/extensions/memory/src/main/resources/META-INF/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/extensions/memory/src/main/resources/META-INF/NOTICE b/extensions/memory/src/main/resources/META-INF/NOTICE new file mode 100644 index 00000000..2d587d91 --- /dev/null +++ b/extensions/memory/src/main/resources/META-INF/NOTICE @@ -0,0 +1,14 @@ +Sakai Nakamura +Copyright 2009 The Sakai Foundation + +This product includes software developed at +The Sakai Foundation (http://www.sakaiproject.org/). + +----------------------------------------------------------- + +This product includes software (Apache Sling, Apache Felix, Apache Shindig and many other Apache products) +The Apache Software Foundation (http://www.apache.org/). + +Binary distributions of this product contain jars developed and licensed by other third parties, identified by the +LICENSE and NOTICE files included within each jar under the META-INF directory. 
+ diff --git a/extensions/memory/src/main/resources/uk/co/tfd/sm/memory/ehcache/ehcacheConfig.xml b/extensions/memory/src/main/resources/uk/co/tfd/sm/memory/ehcache/ehcacheConfig.xml new file mode 100644 index 00000000..afdb1804 --- /dev/null +++ b/extensions/memory/src/main/resources/uk/co/tfd/sm/memory/ehcache/ehcacheConfig.xml @@ -0,0 +1,11 @@ + + + + + \ No newline at end of file diff --git a/extensions/memory/src/test/java/uk/co/tfd/sm/memory/ehcache/CacheConfigTest.java b/extensions/memory/src/test/java/uk/co/tfd/sm/memory/ehcache/CacheConfigTest.java new file mode 100644 index 00000000..cb7dcaff --- /dev/null +++ b/extensions/memory/src/test/java/uk/co/tfd/sm/memory/ehcache/CacheConfigTest.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package uk.co.tfd.sm.memory.ehcache; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Map; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.api.memory.Cache; +import org.sakaiproject.nakamura.api.memory.CacheScope; +import org.sakaiproject.nakamura.api.memory.ThreadBound; + +import com.google.common.collect.ImmutableMap; + +public class CacheConfigTest { + + private CacheManagerServiceImpl cacheManagerService; + + @Before + public void setUp() throws IOException, InstantiationException, + IllegalAccessException, ClassNotFoundException { + cacheManagerService = new CacheManagerServiceImpl(); + Map properties = ImmutableMap.of(CacheManagerServiceImpl.CACHE_STORE, + (Object) "target/ehcache/store", + CacheManagerServiceImpl.BIND_ADDRESS, "127.0.0.1", + CacheManagerServiceImpl.CACHE_CONFIG, "src/test/resources/testconfig/simple-ehcacheConfig.xml" + ); + cacheManagerService.activate(properties); + } + + @After + public void tearDown() { + cacheManagerService.deactivate(); + } + + private void exerciseCache(String cacheName, CacheScope scope) { + Cache cache = cacheManagerService.getCache(cacheName, scope); + cache.put("fish", "cat"); + assertTrue("Expected element to be in cache", cache.containsKey("fish")); + Cache sameCache = cacheManagerService + .getCache(cacheName, scope); + assertEquals("Expected cache to work", "cat", sameCache.get("fish")); + sameCache.put("fish", "differentcat"); + assertEquals("Expected cache value to propogate", "differentcat", + cache.get("fish")); + sameCache.remove("fish"); + sameCache.remove("another"); + assertNull("Expected item to be removed from cache", cache.get("fish")); + cache.put("foo", 
"bar"); + cache.clear(); + assertNull("Expected cache to be empty", cache.get("foo")); + cacheManagerService.unbind(scope); + } + + @Test + public void testCacheStorage() { + for (CacheScope scope : CacheScope.values()) { + exerciseCache("TestCache", scope); + } + } + + @Test + public void testNullCacheNames() { + for (CacheScope scope : CacheScope.values()) { + exerciseCache(null, scope); + } + } + + @Test + public void testCacheWithChildKeys() { + for (CacheScope scope : CacheScope.values()) { + String cacheName = "SomeTestCache"; + Cache cache = cacheManagerService + .getCache(cacheName, scope); + cache.put("fish", "cat"); + assertTrue("Expected element to be in cache", + cache.containsKey("fish")); + cache.put("fish/child", "childcat"); + cache.put("fish/child/child", "childcatchild"); + Cache sameCache = cacheManagerService.getCache(cacheName, + scope); + sameCache.removeChildren("fish/child/child"); + assertNull("Expected key to be removed", + cache.get("fish/child/child")); + sameCache.removeChildren("fish"); + assertNull("Expected key to be removed", cache.get("fish")); + assertNull("Expected key to be removed", cache.get("fish/child")); + } + } + + @Test + public void testThreadUnbinding() { + ThreadBound testItem = createMock(ThreadBound.class); + testItem.unbind(); + testItem.unbind(); + replay(testItem); + Cache threadBoundCache = cacheManagerService.getCache( + "testCache", CacheScope.THREAD); + threadBoundCache.put("testItem", testItem); + threadBoundCache.remove("testItem"); + threadBoundCache.put("testItem", testItem); + threadBoundCache.clear(); + verify(testItem); + } + +} diff --git a/extensions/memory/src/test/java/uk/co/tfd/sm/memory/ehcache/TestCache.java b/extensions/memory/src/test/java/uk/co/tfd/sm/memory/ehcache/TestCache.java new file mode 100644 index 00000000..29763a10 --- /dev/null +++ b/extensions/memory/src/test/java/uk/co/tfd/sm/memory/ehcache/TestCache.java @@ -0,0 +1,120 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + 
* or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.memory.ehcache; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Map; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.sakaiproject.nakamura.api.memory.Cache; +import org.sakaiproject.nakamura.api.memory.CacheScope; +import org.sakaiproject.nakamura.api.memory.ThreadBound; + +import com.google.common.collect.ImmutableMap; + +public class TestCache { + + private CacheManagerServiceImpl cacheManagerService; + + @Before + public void setUp() throws IOException, InstantiationException, IllegalAccessException, + ClassNotFoundException { + cacheManagerService = new CacheManagerServiceImpl(); + Map properties = ImmutableMap.of("cache-store",(Object)"target/ehcache/store", "bind-address", "127.0.0.1"); + cacheManagerService.activate(properties); + } + + @After + public void tearDown() { + cacheManagerService.deactivate(); + } + + private void exerciseCache(String cacheName, CacheScope scope) { + Cache cache = 
cacheManagerService.getCache(cacheName, scope); + cache.put("fish", "cat"); + assertTrue("Expected element to be in cache", cache.containsKey("fish")); + Cache sameCache = cacheManagerService.getCache(cacheName, scope); + assertEquals("Expected cache to work", "cat", sameCache.get("fish")); + sameCache.put("fish", "differentcat"); + assertEquals("Expected cache value to propogate", "differentcat", cache.get("fish")); + sameCache.remove("fish"); + sameCache.remove("another"); + assertNull("Expected item to be removed from cache", cache.get("fish")); + cache.put("foo", "bar"); + cache.clear(); + assertNull("Expected cache to be empty", cache.get("foo")); + cacheManagerService.unbind(scope); + } + + @Test + public void testCacheStorage() { + for (CacheScope scope : CacheScope.values()) { + exerciseCache("TestCache", scope); + } + } + + @Test + public void testNullCacheNames() { + for (CacheScope scope : CacheScope.values()) { + exerciseCache(null, scope); + } + } + + @Test + public void testCacheWithChildKeys() { + for (CacheScope scope : CacheScope.values()) { + String cacheName = "SomeTestCache"; + Cache cache = cacheManagerService.getCache(cacheName, scope); + cache.put("fish", "cat"); + assertTrue("Expected element to be in cache", cache.containsKey("fish")); + cache.put("fish/child", "childcat"); + cache.put("fish/child/child", "childcatchild"); + Cache sameCache = cacheManagerService.getCache(cacheName, scope); + sameCache.removeChildren("fish/child/child"); + assertNull("Expected key to be removed", cache.get("fish/child/child")); + sameCache.removeChildren("fish"); + assertNull("Expected key to be removed", cache.get("fish")); + assertNull("Expected key to be removed", cache.get("fish/child")); + } + } + + @Test + public void testThreadUnbinding() { + ThreadBound testItem = createMock(ThreadBound.class); + testItem.unbind(); + testItem.unbind(); + replay(testItem); + Cache threadBoundCache = cacheManagerService.getCache("testCache", + CacheScope.THREAD); + 
threadBoundCache.put("testItem", testItem); + threadBoundCache.remove("testItem"); + threadBoundCache.put("testItem", testItem); + threadBoundCache.clear(); + verify(testItem); + } + +} diff --git a/extensions/memory/src/test/resources/testconfig/cluster-ehcacheConfig.xml b/extensions/memory/src/test/resources/testconfig/cluster-ehcacheConfig.xml new file mode 100644 index 00000000..2e11504c --- /dev/null +++ b/extensions/memory/src/test/resources/testconfig/cluster-ehcacheConfig.xml @@ -0,0 +1,235 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/extensions/memory/src/test/resources/testconfig/simple-ehcacheConfig.xml b/extensions/memory/src/test/resources/testconfig/simple-ehcacheConfig.xml new file mode 100644 index 00000000..afdb1804 --- /dev/null +++ b/extensions/memory/src/test/resources/testconfig/simple-ehcacheConfig.xml @@ -0,0 +1,11 @@ + + + + + \ No newline at end of file diff --git a/extensions/milton/pom.xml b/extensions/milton/pom.xml new file mode 100644 index 00000000..0e98697c --- /dev/null +++ b/extensions/milton/pom.xml @@ -0,0 +1,104 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.milton + bundle + 1.6.4-SNAPSHOT + Sparse Map :: Milton Support + Provides Milton Jar + + UTF-8 + + + + + org.apache.felix + maven-bundle-plugin + true + + + + http://groups.google.com/group/sakai-nakamura + + Timefields Ltd + ${project.artifactId} + sparse-map,driver + com.bradmcevoy.*;-split-package:=merge-first,com.ettrema.* + + !org.springframework.*, + !oracle.*, + !org.jaxen.*, + !org.apache.xerces.*, + * + + + milton-api, + milton-servlet, + mime-util, + jdom, + commons-beanutils + + + + + + + + + com.ettrema + milton-api + 1.6.4 + + + eu.medsea.mimeutil + mime-util + 2.1.3 + + + org.jdom + jdom + 1.1 + + + 
commons-beanutils + commons-beanutils + 1.8.3 + + + com.ettrema + milton-servlet + 1.6.4 + + + spring + org.springframework + + + spring-webmvc + org.springframework + + + + + + + milton-releases + Milton Releases + + true + + + false + + http://www.ettrema.com/maven2/ + + + diff --git a/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/StandardFilter.java b/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/StandardFilter.java new file mode 100644 index 00000000..40b943d9 --- /dev/null +++ b/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/StandardFilter.java @@ -0,0 +1,67 @@ +package ignore.com.bradmcevoy.http; + +import ignore.com.bradmcevoy.http.webdav.RuntimeBadRequestException; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.bradmcevoy.http.Filter; +import com.bradmcevoy.http.FilterChain; +import com.bradmcevoy.http.Handler; +import com.bradmcevoy.http.HttpManager; +import com.bradmcevoy.http.Request; +import com.bradmcevoy.http.Response; +import com.bradmcevoy.http.exceptions.BadRequestException; +import com.bradmcevoy.http.exceptions.ConflictException; +import com.bradmcevoy.http.exceptions.NotAuthorizedException; + +public class StandardFilter implements Filter { + + private Logger log = LoggerFactory.getLogger( StandardFilter.class ); + public static final String INTERNAL_SERVER_ERROR_HTML = "

    Internal Server Error (500)

    "; + + public StandardFilter() { + } + + public void process( FilterChain chain, Request request, Response response ) { + HttpManager manager = chain.getHttpManager(); + try { + Request.Method method = request.getMethod(); + + Handler handler = manager.getMethodHandler( method ); + if( handler == null ) { + log.trace( "No handler for: " + method ); + manager.getResponseHandler().respondMethodNotImplemented( null, response, request ); + } else { + if( log.isTraceEnabled() ) { + log.trace( "delegate to method handler: " + handler.getClass().getCanonicalName() ); + } + handler.process( manager, request, response ); + } +// ieb modification start + } catch (RuntimeBadRequestException ex ) { + log.warn( "BadRequestException: " + ex.getReason() ); + manager.getResponseHandler().respondBadRequest( null, response, request ); +// ieb modifiation end + } catch( BadRequestException ex ) { + log.warn( "BadRequestException: " + ex.getReason() ); + manager.getResponseHandler().respondBadRequest( ex.getResource(), response, request ); + } catch( ConflictException ex ) { + log.warn( "conflictException: " + ex.getMessage() ); + manager.getResponseHandler().respondConflict( ex.getResource(), response, request, INTERNAL_SERVER_ERROR_HTML ); + } catch( NotAuthorizedException ex ) { + log.warn( "NotAuthorizedException" ); + manager.getResponseHandler().respondUnauthorised( ex.getResource(), response, request ); + } catch( Throwable e ) { + log.error( "process", e ); + try { + manager.getResponseHandler().respondServerError( request, response, INTERNAL_SERVER_ERROR_HTML ); + } catch( Throwable ex ) { + log.error( "Exception generating server error response, setting response status to 500", ex ); + response.setStatus( Response.Status.SC_INTERNAL_SERVER_ERROR ); + } + } finally { + response.close(); + } + } +} diff --git a/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/webdav/DefaultPropFindRequestFieldParser.java 
b/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/webdav/DefaultPropFindRequestFieldParser.java new file mode 100644 index 00000000..1f7458fe --- /dev/null +++ b/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/webdav/DefaultPropFindRequestFieldParser.java @@ -0,0 +1,74 @@ +package ignore.com.bradmcevoy.http.webdav; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.LinkedHashSet; +import java.util.Set; + +import javax.xml.namespace.QName; + +import org.apache.commons.io.output.ByteArrayOutputStream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.xml.sax.InputSource; +import org.xml.sax.SAXException; +import org.xml.sax.XMLReader; +import org.xml.sax.helpers.XMLReaderFactory; + +import com.bradmcevoy.http.webdav.PropFindRequestFieldParser; +import com.bradmcevoy.http.webdav.PropFindSaxHandler; +import com.bradmcevoy.io.StreamUtils; +// This was modified to correct bad xml processing. +/** + * Simple implmentation which just parses the request body. If no xml is present + * it will return an empty set. + * + * Note this generally shouldnt be used directly, but should be wrapped by + * MSPropFindRequestFieldParser to support windows clients. 
+ * + * @author brad + */ +public class DefaultPropFindRequestFieldParser implements PropFindRequestFieldParser { + + private static final Logger log = LoggerFactory.getLogger( DefaultPropFindRequestFieldParser.class ); + + public DefaultPropFindRequestFieldParser() { + } + + public ParseResult getRequestedFields( InputStream in ) { + try { + final Set set = new LinkedHashSet(); + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + StreamUtils.readTo( in, bout, false, true ); + byte[] arr = bout.toByteArray(); + if( arr.length > 1 ) { + ByteArrayInputStream bin = new ByteArrayInputStream( arr ); + XMLReader reader = XMLReaderFactory.createXMLReader(); + PropFindSaxHandler handler = new PropFindSaxHandler(); + reader.setContentHandler( handler ); + try { + reader.parse( new InputSource( bin ) ); + if( handler.isAllProp() ) { + return new ParseResult( true, set ); + } else { + set.addAll( handler.getAttributes().keySet() ); + } + } catch( IOException e ) { + log.warn( "exception parsing request body", e ); + // ignore + } catch( SAXException e ) { + log.warn( "exception parsing request body", e ); +// ieb modification start + throw new RuntimeBadRequestException(e.getMessage(), e); +// ieb modificatoin end + } + } + return new ParseResult( false, set ); + } catch ( RuntimeException ex ) { + throw ex; + } catch( Exception ex ) { + throw new RuntimeException( ex ); + } + } +} diff --git a/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/webdav/RuntimeBadRequestException.java b/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/webdav/RuntimeBadRequestException.java new file mode 100644 index 00000000..990a66ee --- /dev/null +++ b/extensions/milton/src/main/java/ignore/com/bradmcevoy/http/webdav/RuntimeBadRequestException.java @@ -0,0 +1,23 @@ +package ignore.com.bradmcevoy.http.webdav; + + +public class RuntimeBadRequestException extends RuntimeException { + + + /** + * + */ + private static final long serialVersionUID = -1004634408897616343L; 
+ private String reason; + + public RuntimeBadRequestException(String message, Exception e) { + super(message, e); + this.reason = message; + } + + public String getReason() { + return reason; + } + + +} diff --git a/extensions/pom.xml b/extensions/pom.xml new file mode 100644 index 00000000..f237e4e4 --- /dev/null +++ b/extensions/pom.xml @@ -0,0 +1,29 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../pom.xml + + extensions + pom + 0.1-SNAPSHOT + Sparse Map :: Extensions Builder + Reactor to build extensions. + + http + jetty + memory + milton + proxy + template + webdav + jaxrs + resource + integration + app + + diff --git a/extensions/proxy/pom.xml b/extensions/proxy/pom.xml new file mode 100644 index 00000000..9087771a --- /dev/null +++ b/extensions/proxy/pom.xml @@ -0,0 +1,125 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + uk.co.tfd.sm.proxy + bundle + 0.1-SNAPSHOT + Sparse Map :: Proxy Processor Bundle + Provides Proxy support. 
+ + + + org.apache.felix + maven-scr-plugin + + + org.apache.felix + maven-bundle-plugin + true + + + sparse-map + uk.co.tfd.sm.api.proxy + uk.co.tfd.sm.proxy.* + + org.apache.commons.io; version="1.4", + com.google.common.collect; version="9.0.0", + * + + true + httpclient,httpmime,httpcore + + + + + + + + org.slf4j + slf4j-simple + 1.5.10 + + + org.apache.felix + org.apache.felix.scr.annotations + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + junit + junit + 4.4 + jar + compile + + + javax.servlet + servlet-api + 2.4 + jar + compile + + + commons-io + commons-io + 1.4 + jar + compile + + + org.apache.httpcomponents + httpclient + 4.1.2 + jar + compile + + + org.apache.httpcomponents + httpmime + 4.1.2 + jar + compile + + + org.mockito + mockito-all + 1.8.5 + jar + test + + + org.mortbay.jetty + jetty + 6.1.15 + jar + test + + + com.google.code.gson + gson + 1.7.1 + jar + compile + + + org.sakaiproject.nakamura + uk.co.tfd.sm.template + 0.1-SNAPSHOT + + + org.sakaiproject.nakamura + uk.co.tfd.sm.memory + 0.1-SNAPSHOT + + + diff --git a/extensions/proxy/pom.xml.safe b/extensions/proxy/pom.xml.safe new file mode 100644 index 00000000..91a2772c --- /dev/null +++ b/extensions/proxy/pom.xml.safe @@ -0,0 +1,157 @@ + + + base + org.sakaiproject.nakamura + 1.1-SNAPSHOT + ../../pom.xml + + 4.0.0 + uk.co.tfd.sm + uk.co.tfd.sm.prox + bundle + Sparse Map :: proxy service + 0.1-SNAPSHOT + This will proxy through to other websites and fetch data + + + + org.apache.felix + maven-scr-plugin + + + org.apache.felix + maven-bundle-plugin + true + + + sakai-nakamura + sun.misc.* + uk.co.tfd.sm.api.proxy + uk.co.tfd.co.sm.proxy.* + !com.sun.*,!sun.misc,!sun.rmi.rmic,!sun.tools.*,!kaffe.rmi.*,!org.apache.bsf,org.apache.tools.ant.*;resolution:=optional,!org.apache.bcel.*,!org.jdom.*,!org.apache.avalon.*,* + true + velocity,werken-xpath,commons-logging,log4j,logkit,ant,xml-resolver,antlr,oro,ical4j + + + + + + + + org.apache.felix + org.osgi.core + + + org.apache.felix + 
org.osgi.compendium + + + org.apache.velocity + velocity + 1.6.2 + + + + + log4j + log4j + 1.2.12 + + + commons-logging + commons-logging + 1.1 + + + werken-xpath + werken-xpath + 0.9.4 + + + ant + ant + 1.6 + + + antlr + antlr + 2.7.7 + + + xml-resolver + xml-resolver + 1.2 + + + oro + oro + 2.0.8 + + + logkit + logkit + 2.0 + + + javax.servlet + servlet-api + 2.5 + provided + + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-simple + + + org.apache.felix + org.apache.felix.scr.annotations + + + + commons-httpclient + commons-httpclient + 3.1 + + + org.mortbay.jetty + jetty + 6.1.15 + test + + + org.mockito + mockito-all + + + org.codehaus.woodstox + stax2-api + 3.0.1 + provided + + + org.codehaus.woodstox + woodstox-core-asl + 4.0.6 + provided + + + relaxngDatatype + relaxngDatatype + 20020414 + provided + + + ical4j + ical4j + 0.9.20 + + + com.google.collections + google-collections + + + diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyClientException.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyClientException.java new file mode 100644 index 00000000..8014828d --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyClientException.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.api.proxy; + +/** + * Thrown in response to an error on the proxy template. + */ +public class ProxyClientException extends Exception { + + /** + * + */ + private static final long serialVersionUID = 4849409100175115491L; + + /** + * + */ + public ProxyClientException() { + } + + /** + * @param message + */ + public ProxyClientException(String message) { + super(message); + } + + /** + * @param cause + */ + public ProxyClientException(Throwable cause) { + super(cause); + } + + /** + * @param message + * @param cause + */ + public ProxyClientException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyClientService.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyClientService.java new file mode 100644 index 00000000..01f0967d --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyClientService.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package uk.co.tfd.sm.api.proxy; + +import java.io.InputStream; +import java.util.Map; + +/** + * A Proxy Client service provides processing for Proxy request, specified by a resource + * in the JCR. On invoking the executeCall method the supplied resource is inspected and + * used to generate a request which is then dispatched to the requested endpoint. + */ +public interface ProxyClientService { + + /** + * Specification property: The URL template for the end point. + */ + public static final String CONFIG_REQUEST_PROXY_ENDPOINT = "request-proxy-endpoint"; + + /** + * Specification property: The method to use at this end point + */ + public static final String CONFIG_REQUEST_PROXY_METHOD = "request-proxy-method"; + + /** + * Specification property: The content type of the request body. + */ + public static final String CONFIG_REQUEST_CONTENT_TYPE = "request-content-type"; + + /** + * Specification property: The tempalte for the request body, if required. + */ + public static final String CONFIG_PROXY_REQUEST_TEMPLATE = "proxy-request-template"; + + /** + * A multi value property containing a list of headers to add to the request. + */ + public static final String CONFIG_PROXY_HEADER = "proxy-header"; + + /** + * The maximum number of bytes that this request will accept. + */ + public static final String CONFIG_LIMIT_GET_SIZE = "proxy-limit-length"; + + + /** + * Executes a HTTP call using a path in the JCR to point to a template and a map of + * properties to populate that template with. An example might be a SOAP call. + * + *
    +   * {http://www.w3.org/2001/12/soap-envelope}Envelope:{
    +   *  {http://www.w3.org/2001/12/soap-envelope}Body:{
    +   *   {http://www.example.org/stock}GetStockPriceResponse:{
    +   *    >body:[       ]
    +   *    {http://www.example.org/stock}Price:{
    +   *     >body:[34.5]
    +   *    }
    +   *   }
    +   *   >body:[  ]
    +   *  }
    +   *  >body:[   ]
    +   *  {http://www.w3.org/2001/12/soap-envelope}encodingStyle:[http://www.w3.org/2001/12/soap-encoding]
    +   * }
    +   * 
    +   * 
    + * + * @param node + * the node containing the proxy end point specification. + * @param headers + * a map of headers to set int the request. + * @param input + * a map of parameters for all templates (both url and body) + * @param requestInputStream + * containing the request body (can be null if the call requires no body or the + * template will be used to generate the body) + * @param requestContentLength + * if the requestImputStream is specified, the length specifies the length of + * the body. + * @param requerstContentType + * the content type of the request, if null the node property + * sakai:proxy-request-content-type will be used. + * @throws ProxyClientException + */ + ProxyResponse executeCall(Map config, + Map headers, Map input, + InputStream requestInputStream, long requestContentLength, + String requestContentType) throws ProxyClientException; + + + + +} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/RowHasher.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyMethod.java similarity index 76% rename from src/main/java/org/sakaiproject/nakamura/lite/storage/RowHasher.java rename to extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyMethod.java index 141d04f9..d9ddeadc 100644 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/RowHasher.java +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyMethod.java @@ -15,12 +15,12 @@ * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ -package org.sakaiproject.nakamura.lite.storage; +package uk.co.tfd.sm.api.proxy; -import org.sakaiproject.nakamura.api.lite.StorageClientException; - -public interface RowHasher { - - String rowHash(String keySpace, String columnFamily, String key) throws StorageClientException; +/** + * An enumeration of supported Proxy methods. These are used in the Resource specifying the endpoint. 
+ */ +public enum ProxyMethod { + GET(), POST(), PUT(), HEAD(), OPTIONS(); } diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyPostProcessor.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyPostProcessor.java new file mode 100644 index 00000000..6049c9e1 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyPostProcessor.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.api.proxy; + +import java.io.IOException; +import java.util.Map; + +import javax.servlet.http.HttpServletResponse; + +/** + * + */ +public interface ProxyPostProcessor { + + /** + * + */ + public static final String CONFIG_POSTPROCESSOR = "postprocessor"; + + /** + * @param templateParams + * The parameters used to fill out the template. These are supplied as request + * parameters. + * @param config The proxy config. + * @param response + * The response that will be sent to the user. + * @param proxyResponse + * The response as it came back from the remote resource. 
+ * @throws IOException + */ + void process(Map config, Map templateParams, HttpServletResponse response, + ProxyResponse proxyResponse) throws IOException; + + /** + * @return The name of this ProxyPostProcessor. Nodes that want this processor to be run + * should set the sakai:postprocessor property to this value. + */ + String getName(); + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyPreProcessor.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyPreProcessor.java new file mode 100644 index 00000000..79ef16d8 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyPreProcessor.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.api.proxy; + +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +/** + * A pre processor interface for Proxy Requests, preprocesses each request performing + * further processing on the headers and templateParameters. + */ +public interface ProxyPreProcessor { + + /** + * + */ + public static final String CONFIG_PREPROCESSOR = "preprocessor"; + + /** + * @param request + * The request that hits the proxy node. + * @param headers + * A map of headers already collected. 
The map can be modified as wished. All + * of the entries in this map will end up als headers on the outgoing request + * to the remote resource. + * @param templateParams + * A map of template parameters that will be passed to the template engine to render the + * url. + */ + void preProcessRequest(HttpServletRequest request, Map headers, + Map templateParams); + + /** + * @return The name of this ProxyPreProcessor. Nodes that want this processor to be run + * should set the sakai:preprocessor property to this value. + */ + String getName(); + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyResponse.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyResponse.java new file mode 100644 index 00000000..e0abd8dd --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/api/proxy/ProxyResponse.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package uk.co.tfd.sm.api.proxy; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; + +/** + * Represents a response from a proxy + */ +public interface ProxyResponse { + + /** + * @return + */ + int getResultCode(); + + /** + * @return + */ + Map getResponseHeaders(); + + + /** + * @return + * @throws IOException + */ + InputStream getResponseBodyAsInputStream() throws IOException; + + /** + * @return + * @throws IOException + */ + String getResponseBodyAsString() throws IOException; + + /** + * + */ + void close(); + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/CachingProxyProcessor.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/CachingProxyProcessor.java new file mode 100644 index 00000000..6f77f95e --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/CachingProxyProcessor.java @@ -0,0 +1,13 @@ +package uk.co.tfd.sm.proxy; + +import java.io.IOException; +import java.util.Map; + +import javax.servlet.http.HttpServletResponse; + +public interface CachingProxyProcessor { + + boolean sendCached(Map config, + Map templateParams, HttpServletResponse response) throws IOException; + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/DefaultProxyPostProcessorImpl.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/DefaultProxyPostProcessorImpl.java new file mode 100644 index 00000000..fbba9341 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/DefaultProxyPostProcessorImpl.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy; + + +import uk.co.tfd.sm.api.proxy.ProxyPostProcessor; +import uk.co.tfd.sm.api.proxy.ProxyResponse; + +import java.io.IOException; +import java.util.Map; +import java.util.Map.Entry; + +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.io.IOUtils; + +/** + * + */ +public class DefaultProxyPostProcessorImpl implements ProxyPostProcessor { + + /** + * {@inheritDoc} + * @throws IOException + * @see uk.co.tfd.sm.api.proxy.ProxyPostProcessor#process(org.apache.sling.api.SlingHttpServletResponse, uk.co.tfd.sm.api.proxy.ProxyResponse) + */ + public void process(Map config, Map templateParams, + HttpServletResponse response, ProxyResponse proxyResponse) throws IOException { + for (Entry h : proxyResponse.getResponseHeaders().entrySet()) { + for (String v : h.getValue()) { + response.setHeader(h.getKey(), v); + } + } + int code = proxyResponse.getResultCode(); + response.setStatus(code); + IOUtils.copy(proxyResponse.getResponseBodyAsInputStream(), response + .getOutputStream()); + } + + /** + * {@inheritDoc} + * @see uk.co.tfd.sm.api.proxy.ProxyPostProcessor#getName() + */ + public String getName() { + return "default"; + } + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/InputStreamHolder.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/InputStreamHolder.java new file mode 100644 index 00000000..217088f8 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/InputStreamHolder.java @@ -0,0 +1,13 @@ +package uk.co.tfd.sm.proxy; + +import java.io.InputStream; + 
+public interface InputStreamHolder { + + InputStream getStream(); + + String getMimeType(); + + String getFileName(); + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/NonResolvableResource.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/NonResolvableResource.java new file mode 100644 index 00000000..b1ac0842 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/NonResolvableResource.java @@ -0,0 +1,18 @@ +package uk.co.tfd.sm.proxy; + +public class NonResolvableResource implements Resource { + + private String value; + public NonResolvableResource(String value) { + this.value = value; + } + public boolean isReference() { + return true; + } + @Override + public String toString() { + return value; + } + + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ProxyClientServiceImpl.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ProxyClientServiceImpl.java new file mode 100644 index 00000000..a15a0736 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ProxyClientServiceImpl.java @@ -0,0 +1,468 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */

package uk.co.tfd.sm.proxy;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.ProxySelector;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringUtils;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.http.Header;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpMessage;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpOptions;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.ProxySelectorRoutePlanner;
import org.apache.http.params.HttpParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import uk.co.tfd.sm.api.proxy.ProxyClientException;
import uk.co.tfd.sm.api.proxy.ProxyClientService;
import uk.co.tfd.sm.api.proxy.ProxyMethod;
import uk.co.tfd.sm.api.proxy.ProxyPostProcessor;
import uk.co.tfd.sm.api.proxy.ProxyResponse;
import uk.co.tfd.sm.api.template.TemplateService;

import com.google.common.collect.Maps;

/**
 * Default implementation of {@link ProxyClientService}. Executes HTTP calls
 * against an endpoint described by a configuration map, optionally rendering
 * the endpoint URL and request body from templates.
 */
@Service(value = ProxyClientService.class)
@Component(immediate = true, metatype = true)
public class ProxyClientServiceImpl implements ProxyClientService {

	/**
	 * Default content type of request bodies if none has been specified.
	 */
	private static final String APPLICATION_OCTET_STREAM = "application/octet-stream";

	@Property(value = { "rss", "trustedLoginTokenProxyPostProcessor",
			"someothersafepostprocessor" })
	private static final String SAFE_POSTPROCESSORS = "safe.postprocessors";

	private static final Logger LOGGER = LoggerFactory
			.getLogger(ProxyClientServiceImpl.class);

	@Reference
	protected TemplateService templateService;

	// Component configuration, exposed to URL/body templates under "config".
	private Map<String, Object> configProperties;

	// Names of post processors whose endpoint templates may depend on
	// request parameters without being considered unsafe.
	private Set<String> safeOpenProcessors = new HashSet<String>();

	// True when the JRE http.proxyHost system property is set.
	private boolean useJreProxy = false;

	// Per-thread binding of the config in use for the current call.
	private ThreadLocal<Map<String, Object>> boundConfig = new ThreadLocal<Map<String, Object>>();

	/**
	 * Create resources used by this component.
	 *
	 * @param properties
	 *            the component configuration.
	 * @throws Exception
	 */
	protected void activate(Map<String, Object> properties) throws Exception {
		configProperties = properties;
		String[] safePostProcessorNames = (String[]) configProperties
				.get(SAFE_POSTPROCESSORS);
		if (safePostProcessorNames == null) {
			safeOpenProcessors.add("rss");
			safeOpenProcessors.add("trustedLoginTokenProxyPostProcessor");
		} else {
			for (String pp : safePostProcessorNames) {
				safeOpenProcessors.add(pp);
			}
		}

		// allow communications via a proxy server if command line
		// java parameters http.proxyHost,http.proxyPort,http.proxyUser,
		// http.proxyPassword have been provided.
		String proxyHost = System.getProperty("http.proxyHost", "");
		if (!proxyHost.equals("")) {
			useJreProxy = true;
		}
	}

	/**
	 * @return a new client, routed through the JRE proxy settings when
	 *         http.proxyHost is configured.
	 */
	private HttpClient getHttpClient() {
		DefaultHttpClient httpclient = new DefaultHttpClient();
		if (useJreProxy) {
			ProxySelectorRoutePlanner routePlanner = new ProxySelectorRoutePlanner(
					httpclient.getConnectionManager().getSchemeRegistry(),
					ProxySelector.getDefault());
			httpclient.setRoutePlanner(routePlanner);
		}
		return httpclient;
	}

	/**
	 * Clean up resources used by this component.
	 *
	 * @param ctx
	 * @throws Exception
	 */
	protected void deactivate(Map<String, Object> ctx) throws Exception {
	}

	/**
	 * Executes a HTTP call using a configuration map that points to a
	 * template and a map of properties to populate that template with. An
	 * example might be a SOAP call.
	 *
	 * <pre>
	 * {http://www.w3.org/2001/12/soap-envelope}Envelope:{
	 *  {http://www.w3.org/2001/12/soap-envelope}Body:{
	 *   {http://www.example.org/stock}GetStockPriceResponse:{
	 *    &gt;body:[       ]
	 *    {http://www.example.org/stock}Price:{
	 *     &gt;body:[34.5]
	 *    }
	 *   }
	 *  }
	 * }
	 * </pre>
	 *
	 * @param config
	 *            the proxy end point specification.
	 * @param headers
	 *            a map of headers to set in the request.
	 * @param input
	 *            a map of parameters for all templates (both url and body).
	 * @param requestInputStream
	 *            containing the request body (can be null if the call
	 *            requires no body or the template will be used to generate
	 *            the body).
	 * @param requestContentLength
	 *            if the requestInputStream is specified, the length of the
	 *            body.
	 * @param requestContentType
	 *            the content type of the request; if null the config
	 *            property sakai:proxy-request-content-type will be used.
	 * @throws ProxyClientException
	 */
	public ProxyResponse executeCall(Map<String, Object> config,
			Map<String, Object> headers, Map<String, Object> input,
			InputStream requestInputStream, long requestContentLength,
			String requestContentType) throws ProxyClientException {
		try {
			LOGGER.info(
					"Calling Execute Call with Config:[{}] Headers:[{}] Input:[{}] "
							+ "RequestInputStream:[{}] InputStreamContentLength:[{}] RequestContentType:[{}] ",
					new Object[] { config, headers, input, requestInputStream,
							requestContentLength, requestContentType });
			bindConfig(config);

			if (config != null
					&& config.containsKey(CONFIG_REQUEST_PROXY_ENDPOINT)) {
				// setup the post request
				String endpointURL = (String) config
						.get(CONFIG_REQUEST_PROXY_ENDPOINT);
				if (isUnsafeProxyDefinition(config)) {
					// unsafe processors may not build the host from request
					// supplied template variables.
					try {
						URL u = new URL(endpointURL);
						String host = u.getHost();
						if (host.indexOf('$') >= 0) {
							throw new ProxyClientException(
									"Invalid Endpoint template, relies on request to resolve valid URL "
											+ u);
						}
					} catch (MalformedURLException e) {
						throw new ProxyClientException(
								"Invalid Endpoint template, relies on request to resolve valid URL",
								e);
					}
				}

				LOGGER.info("Valid Endpoint Definition");

				Map<String, Object> context = Maps.newHashMap(input);

				// add in the config properties from the bundle overwriting
				// everything else.
				context.put("config", configProperties);

				endpointURL = processUrlTemplate(endpointURL, context);

				LOGGER.info("Calling URL {} ", endpointURL);

				ProxyMethod proxyMethod = ProxyMethod.GET;
				if (config.containsKey(CONFIG_REQUEST_PROXY_METHOD)) {
					try {
						proxyMethod = ProxyMethod.valueOf((String) config
								.get(CONFIG_REQUEST_PROXY_METHOD));
					} catch (Exception e) {
						// an unrecognized method name falls back to GET
						LOGGER.debug(
								"Unknown proxy method, defaulting to GET: {}",
								e.getMessage());
					}
				}

				HttpClient client = getHttpClient();

				HttpUriRequest method = null;
				switch (proxyMethod) {
				case GET:
					if (config.containsKey(CONFIG_LIMIT_GET_SIZE)) {
						// pre-flight with a HEAD request so that oversized
						// bodies are rejected before the GET is issued.
						long maxSize = (Long) config.get(CONFIG_LIMIT_GET_SIZE);
						HttpHead h = new HttpHead(endpointURL);

						HttpParams params = h.getParams();
						// make certain we reject the body of a head
						params.setBooleanParameter(
								"http.protocol.reject-head-body", true);
						h.setParams(params);
						// BUGFIX: was populateMessage(method, ...) while
						// method was still null, which threw an NPE whenever
						// any header was supplied.
						populateMessage(h, config, headers);
						HttpResponse response = client.execute(h);
						if (response.getStatusLine().getStatusCode() == 200) {
							// Check if the content-length is smaller than the
							// maximum (if any).
							Header contentLengthHeader = response
									.getLastHeader("Content-Length");
							if (contentLengthHeader != null) {
								long length = Long
										.parseLong(contentLengthHeader
												.getValue());
								if (length > maxSize) {
									return new ProxyResponseImpl(
											HttpServletResponse.SC_PRECONDITION_FAILED,
											"Response too large", response);
								}
							}
						} else {
							return new ProxyResponseImpl(response);
						}
					}
					method = new HttpGet(endpointURL);
					break;
				case HEAD:
					method = new HttpHead(endpointURL);
					break;
				case OPTIONS:
					method = new HttpOptions(endpointURL);
					break;
				case POST:
					method = new HttpPost(endpointURL);
					break;
				case PUT:
					method = new HttpPut(endpointURL);
					break;
				default:
					method = new HttpGet(endpointURL);
				}

				populateMessage(method, config, headers);

				if (requestInputStream == null
						&& !config.containsKey(CONFIG_PROXY_REQUEST_TEMPLATE)) {
					// no body supplied and no template: for POST, send the
					// input parameters as a multipart form.
					if (method instanceof HttpPost) {
						HttpPost postMethod = (HttpPost) method;
						MultipartEntity multipart = new MultipartEntity();
						for (Entry<String, Object> param : input.entrySet()) {
							String key = param.getKey();
							Object value = param.getValue();
							if (value instanceof Object[]) {
								for (Object val : (Object[]) value) {
									addPart(multipart, key, val);
								}
							} else {
								addPart(multipart, key, value);
							}
						}
						// BUGFIX: setEntity was inside the loop, so an empty
						// input map left the POST with no entity at all.
						postMethod.setEntity(multipart);
					}
				} else {

					if (method instanceof HttpEntityEnclosingRequestBase) {
						String contentType = requestContentType;
						if (contentType == null
								&& config
										.containsKey(CONFIG_REQUEST_CONTENT_TYPE)) {
							contentType = (String) config
									.get(CONFIG_REQUEST_CONTENT_TYPE);

						}
						if (contentType == null) {
							contentType = APPLICATION_OCTET_STREAM;
						}
						HttpEntityEnclosingRequestBase eemethod = (HttpEntityEnclosingRequestBase) method;
						if (requestInputStream != null) {
							eemethod.setHeader(HttpHeaders.CONTENT_TYPE,
									contentType);
							eemethod.setEntity(new InputStreamEntity(
									requestInputStream, requestContentLength));
						} else {
							// build the request body from the template
							StringWriter body = new StringWriter();
							templateService.evaluate(context, body,
									(String) config.get("path"),
									(String) config
											.get(CONFIG_PROXY_REQUEST_TEMPLATE));
							byte[] soapBodyContent = body.toString().getBytes(
									"UTF-8");
							eemethod.setHeader(HttpHeaders.CONTENT_TYPE,
									contentType);
							eemethod.setEntity(new InputStreamEntity(
									new ByteArrayInputStream(soapBodyContent),
									soapBodyContent.length));

						}
					}
				}

				HttpResponse response = client.execute(method);
				if (response.getStatusLine().getStatusCode() == 302
						&& method instanceof HttpEntityEnclosingRequestBase) {
					// handle redirects on post and put by following with GET
					Header location = response.getFirstHeader("Location");
					if (location != null) {
						method = new HttpGet(location.getValue());
						response = client.execute(method);
					}
				}

				return new ProxyResponseImpl(response);
			}

		} catch (ProxyClientException e) {
			throw e;
		} catch (Exception e) {
			LOGGER.error(e.getMessage(), e);
			throw new ProxyClientException("The Proxy request specified by "
					+ config + " failed, cause follows:", e);
		} finally {
			unbindConfig();
		}
		throw new ProxyClientException("The Proxy request specified by "
				+ config + " does not contain a valid endpoint specification ");
	}

	/**
	 * Adds a value (String or String[]) as UTF-8 string parts to a
	 * multipart entity.
	 */
	private void addPart(MultipartEntity multipart, String key, Object value)
			throws UnsupportedEncodingException {
		if (value instanceof String[]) {
			String[] v = (String[]) value;
			for (String s : v) {
				multipart.addPart(key,
						new StringBody(s, Charset.forName("UTF-8")));
			}
		} else {
			multipart.addPart(key,
					new StringBody((String) value, Charset.forName("UTF-8")));
		}
	}

	/**
	 * @return true unless the configured post processor is in the safe list.
	 */
	private boolean isUnsafeProxyDefinition(Map<String, Object> config) {
		if (config.containsKey(ProxyPostProcessor.CONFIG_POSTPROCESSOR)) {
			String postProcessorName = (String) config
					.get(ProxyPostProcessor.CONFIG_POSTPROCESSOR);
			return !safeOpenProcessors.contains(postProcessorName);
		}
		return true;
	}

	/**
	 * Renders the endpoint URL template against the supplied context.
	 */
	private String processUrlTemplate(String endpointURL,
			Map<String, Object> context) throws IOException {
		Reader urlTemplateReader = new StringReader(endpointURL);
		StringWriter urlWriter = new StringWriter();
		templateService.evaluate(context, urlWriter, "urlprocessing",
				urlTemplateReader);
		return urlWriter.toString();
	}

	/**
	 * Copies headers from the supplied map and from the CONFIG_PROXY_HEADER
	 * config property ("name: value" pairs, comma separated) onto the
	 * message.
	 *
	 * @param message
	 *            the request to populate.
	 */
	private void populateMessage(HttpMessage message,
			Map<String, Object> config, Map<String, Object> headers) {

		for (Entry<String, Object> header : headers.entrySet()) {
			Object o = header.getValue();
			if (o instanceof String[]) {
				for (String s : (String[]) o) {
					message.addHeader(header.getKey(), s);
				}
			} else if (o != null) {
				message.addHeader(header.getKey(), String.valueOf(o));
			}
		}

		String additionalHeaders = (String) config.get(CONFIG_PROXY_HEADER);
		if (additionalHeaders != null) {
			for (String v : StringUtils.split(additionalHeaders, ",")) {
				String[] keyVal = StringUtils.split(v, ":", 2);
				message.addHeader(keyVal[0].trim(), keyVal[1].trim());
			}
		}

	}

	/**
	 * Clears the per-thread config binding.
	 */
	private void unbindConfig() {
		boundConfig.set(null);
	}

	/**
	 * Binds the config for the current thread for the duration of a call.
	 */
	private void bindConfig(Map<String, Object> config) {
		boundConfig.set(config);
	}

	/**
	 * {@inheritDoc}
	 *
	 * @see au.edu.csu.sakai.integration.api.soapclient.ResourceSource#getResource()
	 */
	public Map<String, Object> getConfig() {
		return boundConfig.get();
	}

}
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package uk.co.tfd.sm.proxy;

import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Lists;

import uk.co.tfd.sm.api.proxy.ProxyResponse;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

/**
 * Implements the ProxyResponse holder by wrapping the HttpResponse.
 */
public class ProxyResponseImpl implements ProxyResponse {

	private static final Logger LOGGER = LoggerFactory
			.getLogger(ProxyResponseImpl.class);
	private HttpResponse response;
	// Header name -> all values seen for that name.
	private Map<String, String[]> headers = new HashMap<String, String[]>();
	// Streams handed out via getResponseBodyAsInputStream(), closed in close().
	private List<InputStream> leakedInputStreams = Lists.newArrayList();
	private int code;
	private String responseCause;

	/**
	 * Wraps a response, capturing status, reason and headers. Set-Cookie
	 * headers carrying a JSESSIONID are not propagated.
	 *
	 * @param response
	 *            the upstream response.
	 */
	public ProxyResponseImpl(HttpResponse response) {
		this.response = response;
		code = response.getStatusLine().getStatusCode();
		responseCause = response.getStatusLine().getReasonPhrase();

		for (Header header : response.getAllHeaders()) {
			String name = header.getName();
			String[] values = headers.get(name);
			if (values == null) {
				values = new String[] { header.getValue() };
			} else {
				// append this value to those already collected for the name
				String[] newValues = new String[values.length + 1];
				System.arraycopy(values, 0, newValues, 0, values.length);
				newValues[values.length] = header.getValue();
				values = newValues;
			}

			boolean add = true;
			// We ignore JSESSIONID cookies coming back.
			if (name.toLowerCase(Locale.ROOT).equals("set-cookie")) {
				for (String v : values) {
					if (v.contains("JSESSIONID")) {
						add = false;
						break;
					}
				}
			}
			if (add) {
				headers.put(name, values);
			}
		}
	}

	/**
	 * Wraps a response but overrides its status code and reason phrase.
	 */
	public ProxyResponseImpl(int responseCode, String responseMessage,
			HttpResponse response) {
		this(response);
		code = responseCode;
		responseCause = responseMessage;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @see uk.co.tfd.sm.api.proxy.ProxyResponse#getResultCode()
	 */
	public int getResultCode() {
		return code;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @see uk.co.tfd.sm.api.proxy.ProxyResponse#getResponseHeaders()
	 */
	public Map<String, String[]> getResponseHeaders() {
		return headers;
	}

	/**
	 * {@inheritDoc}
	 *
	 * The returned stream is tracked and closed by {@link #close()}.
	 *
	 * @see uk.co.tfd.sm.api.proxy.ProxyResponse#getResponseBodyAsInputStream()
	 */
	public InputStream getResponseBodyAsInputStream() throws IOException {
		InputStream in = response.getEntity().getContent();
		leakedInputStreams.add(in);
		return in;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Reads the whole body as text. Note that line endings are normalised
	 * to \n and a trailing \n is appended.
	 *
	 * @see uk.co.tfd.sm.api.proxy.ProxyResponse#getResponseBodyAsString()
	 */
	public String getResponseBodyAsString() throws IOException {
		HttpEntity entity = response.getEntity();
		if (entity == null) {
			return null;
		}
		String contentEncoding = getContentEncoding(entity);
		InputStream in = entity.getContent();
		try {
			BufferedReader r = new BufferedReader(new InputStreamReader(in,
					contentEncoding));
			StringBuilder sb = new StringBuilder();
			for (;;) {
				String l = r.readLine();
				if (l == null) {
					break;
				}
				sb.append(l).append("\n");
			}
			r.close();
			return sb.toString();
		} finally {
			// ensure the stream is released even if reading fails
			in.close();
		}
	}

	/**
	 * Determines the character set of the body.
	 *
	 * BUGFIX: the previous implementation first consulted the
	 * Content-Encoding header, which carries a transfer coding such as
	 * "gzip" — not a charset — and made InputStreamReader throw
	 * UnsupportedEncodingException. The charset is carried on the
	 * Content-Type header's "charset" parameter; default to UTF-8.
	 */
	private String getContentEncoding(HttpEntity entity) {
		String contentEncoding = null;
		if (entity == null) {
			return "UTF-8";
		}
		Header contentTypeHeader = entity.getContentType();
		if (contentTypeHeader != null) {
			HeaderElement[] entries = contentTypeHeader.getElements();
			for (HeaderElement h : entries) {
				NameValuePair charset = h.getParameterByName("charset");
				if (charset != null) {
					contentEncoding = charset.getValue();
					break;
				}
			}
		}
		if (contentEncoding == null) {
			contentEncoding = "UTF-8";
		}
		return contentEncoding;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @see uk.co.tfd.sm.api.proxy.ProxyResponse#close()
	 */
	public void close() {
		for (InputStream in : leakedInputStreams) {
			try {
				in.close();
			} catch (IOException e) {
				LOGGER.debug(e.getMessage(), e);
			}
		}
	}

	/**
	 * @return the cause
	 */
	public String getCause() {
		return responseCause;
	}

}
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package uk.co.tfd.sm.proxy;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.collections.EnumerationUtils;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import uk.co.tfd.sm.api.proxy.ProxyClientException;
import uk.co.tfd.sm.api.proxy.ProxyClientService;
import uk.co.tfd.sm.api.proxy.ProxyPostProcessor;
import uk.co.tfd.sm.api.proxy.ProxyPreProcessor;
import uk.co.tfd.sm.api.proxy.ProxyResponse;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

/**
 * This servlet binds to a resource that defines an end point.
 */
@Component(immediate = true, metatype = true)
@Service(value = Servlet.class)
@org.apache.felix.scr.annotations.Properties(value = { @Property(name = "alias", value = "/proxy") })
public class ProxyServlet extends HttpServlet {

	private static final boolean DEFAULT_CACHE_CONFIG = false;

	@Property(boolValue = DEFAULT_CACHE_CONFIG)
	private static final String CACHE_CONFIG = "cache-config";

	protected static final String DEFAULT_TEMPLATE_PATH = "proxy/config";

	@Property(value = DEFAULT_TEMPLATE_PATH)
	protected static final String PROP_TEMPLATE_PATH = "templatePath";

	private static final String SAKAI_PROXY_REQUEST_BODY = "Sakai-Proxy-Request-Body";

	private static final String PUT_METHOD = "PUT";

	private static final String POST_METHOD = "POST";

	public static final String PROXY_PATH_PREFIX = "/var/proxy/";

	// Config key that forces streaming of the request body to the endpoint.
	private static final String SAKAI_REQUEST_STREAM_BODY = "sakai:request-stream-body";

	// Pseudo-header carrying basic-auth credentials into template params.
	private static final String BASIC_PASSWORD = ":basic-password";

	private static final String BASIC_USER = ":basic-user";

	private static final long serialVersionUID = -3190208378955330531L;

	private static final Logger LOGGER = LoggerFactory
			.getLogger(ProxyServlet.class);

	@Reference
	transient ProxyClientService proxyClientService;

	private transient ProxyPostProcessor defaultPostProcessor = new DefaultProxyPostProcessorImpl();

	@Reference(policy = ReferencePolicy.DYNAMIC, cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, referenceInterface = ProxyPreProcessor.class, bind = "bindPreProcessor", unbind = "unbindPreProcessor")
	Map<String, ProxyPreProcessor> preProcessors = new ConcurrentHashMap<String, ProxyPreProcessor>();

	@Reference(policy = ReferencePolicy.DYNAMIC, cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, referenceInterface = ProxyPostProcessor.class, bind = "bindPostProcessor", unbind = "unbindPostProcessor")
	Map<String, ProxyPostProcessor> postProcessors = new ConcurrentHashMap<String, ProxyPostProcessor>();

	// Hop-by-hop / connection headers that must never be forwarded.
	private static final Set<String> HEADER_BLACKLIST = ImmutableSet.of("Host",
			"Content-Length", "Content-Type", "Authorization");

	private static final String CLASSPATH_PREFIX = "uk/co/tfd/sm/proxy";

	// Filesystem base for proxy config files.
	private String baseFile;

	/**
	 * There will almost certainly never be enough valid proxy maps to need
	 * this to be cleared.
	 */
	private Map<String, Map<String, Object>> configCache = Maps
			.newConcurrentMap();

	private boolean cacheConfig;

	@Activate
	protected void activate(Map<String, Object> properties) {
		baseFile = toString(properties.get(PROP_TEMPLATE_PATH),
				DEFAULT_TEMPLATE_PATH);
		cacheConfig = toBoolean(properties.get(CACHE_CONFIG),
				DEFAULT_CACHE_CONFIG);
	}

	/** Converts a config value to String, falling back to a default. */
	private String toString(Object configValue, String defaultValue) {
		if (configValue == null) {
			return defaultValue;
		}
		return String.valueOf(configValue);
	}

	/** Converts a config value to boolean, falling back to a default. */
	private boolean toBoolean(Object configValue, boolean defaultValue) {
		if (configValue == null) {
			return defaultValue;
		}
		return Boolean.parseBoolean(String.valueOf(configValue));
	}

	/**
	 * Resolves the proxy config for the request path, collects headers and
	 * template parameters, runs pre/post processors, and executes the
	 * proxied call.
	 */
	@SuppressWarnings("unchecked")
	@Override
	protected void service(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		try {
			boolean proxyStream = false;
			if (POST_METHOD.equals(request.getMethod())
					|| PUT_METHOD.equals(request.getMethod())) {
				String proxyRequest = request
						.getHeader(SAKAI_PROXY_REQUEST_BODY);
				proxyStream = Boolean.parseBoolean(proxyRequest);
			}

			String path = request.getPathInfo();
			Map<String, Object> config = getConfig(path);
			if (config == null) {
				response.sendError(404);
				return;
			}

			if (!proxyStream) {
				String streamBody = (String) config
						.get(SAKAI_REQUEST_STREAM_BODY);
				if (streamBody != null) {
					proxyStream = Boolean.parseBoolean(streamBody);
				}
			}

			Builder<String, Object> headersBuilder = ImmutableMap.builder();
			Builder<String, Object> templateParamsBuilder = ImmutableMap
					.builder();

			for (Enumeration<?> enames = request.getHeaderNames(); enames
					.hasMoreElements();) {

				String name = (String) enames.nextElement();
				if (HEADER_BLACKLIST.contains(name)) {
					continue;
				}
				if (name.equals(BASIC_USER)) {
					templateParamsBuilder.put(BASIC_USER,
							request.getHeader(BASIC_USER));
				} else if (name.equals(BASIC_PASSWORD)) {
					// BUGFIX: previously copied the :basic-user header under
					// the :basic-user key here too, so the password never
					// reached the template parameters.
					templateParamsBuilder.put(BASIC_PASSWORD,
							request.getHeader(BASIC_PASSWORD));
				} else if (!name.startsWith(":")) {
					headersBuilder.put(
							name,
							toSimpleString((String[]) EnumerationUtils.toList(
									request.getHeaders(name)).toArray(
									new String[0])));
				}
			}

			Map<String, Object> headers = headersBuilder.build();

			// collect the parameters and store into a mutable map.
			Map<String, String[]> rpm = request.getParameterMap();
			for (Entry<String, String[]> e : rpm.entrySet()) {
				templateParamsBuilder.put(e.getKey(),
						toSimpleString(e.getValue()));
			}

			Map<String, Object> templateParams = templateParamsBuilder.build();

			// we might want to pre-process the headers
			if (config.containsKey(ProxyPreProcessor.CONFIG_PREPROCESSOR)) {
				String preprocessorName = (String) config
						.get(ProxyPreProcessor.CONFIG_PREPROCESSOR);
				ProxyPreProcessor preprocessor = preProcessors
						.get(preprocessorName);
				if (preprocessor != null) {
					preprocessor.preProcessRequest(request, headers,
							templateParams);
				} else {
					LOGGER.warn(
							"Unable to find pre processor of name {} for node {} ",
							preprocessorName, path);
				}
			}
			ProxyPostProcessor postProcessor = defaultPostProcessor;
			// we might want to post-process the headers
			if (config.containsKey(ProxyPostProcessor.CONFIG_POSTPROCESSOR)) {
				String postProcessorName = (String) config
						.get(ProxyPostProcessor.CONFIG_POSTPROCESSOR);
				ProxyPostProcessor configured = postProcessors
						.get(postProcessorName);
				if (configured != null) {
					postProcessor = configured;
				} else {
					// BUGFIX: the warning was unreachable before because
					// postProcessor was pre-initialized non-null.
					LOGGER.warn(
							"Unable to find post processor of name {} for node {} ",
							postProcessorName, path);
				}
			}
			if (postProcessor instanceof CachingProxyProcessor) {
				CachingProxyProcessor cachingPostProcessor = (CachingProxyProcessor) postProcessor;
				if (cachingPostProcessor.sendCached(config, templateParams,
						response)) {
					return;
				}
			}

			ProxyResponse proxyResponse = proxyClientService.executeCall(
					config, headers, templateParams, null, -1, null);
			try {
				postProcessor.process(config, templateParams, response,
						proxyResponse);
			} finally {
				proxyResponse.close();
			}
		} catch (IOException e) {
			throw e;
		} catch (ProxyClientException e) {
			response.sendError(500, e.getMessage());
		}
	}

	/**
	 * Collapses a single-element array to its element; null and
	 * multi-element arrays pass through.
	 */
	private Object toSimpleString(String[] a) {
		if (a == null) {
			return null;
		}
		if (a.length == 1) {
			return a[0];
		}
		return a;
	}

	/**
	 * Loads the proxy config for a path, first from the classpath then
	 * from the template directory on disk. Returns null if not found.
	 */
	private Map<String, Object> getConfig(String pathInfo) throws IOException {
		if (pathInfo == null) {
			return null;
		}

		if (cacheConfig && configCache.containsKey(pathInfo)) {
			return configCache.get(pathInfo);
		} else {
			Properties p = new Properties();
			InputStream in = this.getClass().getClassLoader()
					.getResourceAsStream(CLASSPATH_PREFIX + pathInfo);
			if (in != null) {
				LOGGER.info("Loading Classpath {} ", CLASSPATH_PREFIX
						+ pathInfo);
				p.load(in);
				in.close();
			} else {
				LOGGER.info("Not Loading Classpath {} ", CLASSPATH_PREFIX
						+ pathInfo);
				File configFile = new File(baseFile, pathInfo);
				if (!configFile.exists() || !configFile.isFile()
						|| !configFile.canRead()) {
					LOGGER.info("Not Loading File {} ",
							configFile.getAbsoluteFile());
					return null;
				}
				LOGGER.info("Loading File {} ", configFile.getAbsoluteFile());
				FileReader fr = new FileReader(configFile);
				p.load(fr);
				fr.close();
			}
			Builder<String, Object> b = ImmutableMap.builder();
			for (Entry<Object, Object> e : p.entrySet()) {
				String k = String.valueOf(e.getKey());
				b.put(k, e.getValue());
			}
			Map<String, Object> config = b.build();
			if (cacheConfig) {
				// BUGFIX: configs were cached unconditionally, growing the
				// map for every distinct path even with caching disabled.
				configCache.put(pathInfo, config);
			}
			return config;
		}
	}

	protected void bindPreProcessor(ProxyPreProcessor proxyPreProcessor) {
		preProcessors.put(proxyPreProcessor.getName(), proxyPreProcessor);
	}

	protected void unbindPreProcessor(ProxyPreProcessor proxyPreProcessor) {
		preProcessors.remove(proxyPreProcessor.getName());
	}

	protected void bindPostProcessor(ProxyPostProcessor proxyPostProcessor) {
		postProcessors.put(proxyPostProcessor.getName(), proxyPostProcessor);
	}

	protected void unbindPostProcessor(ProxyPostProcessor proxyPostProcessor) {
		postProcessors.remove(proxyPostProcessor.getName());
	}

}
private static final String CONFIG_CONTENT_TYPE = "content-type"; + + private static final String CONFIG_CONTENT_ENCODING = "content-encoding"; + + private static final String RESULT = "result"; + + public static final String CONFIG_RESULT_KEY = "result-key"; + + public static final String CONFIG_FINALTEMPLATE = "finaltemplate"; + + public static final String CONFIG_FINALTEMPLATE_PATTERN = "finaltemplatepattern"; + + public static final String CONFIG_FINALTEMPLATE_SELECTOR_PROPERTY = "finaltemplateselectorproperty"; + + private static final Object CONFIG_FINALTEMPLATE_PRIORITIES = "templatepriorities"; + + public static final String CONFIG_NAMESPACEMAP = "namespacemap"; + + private static final Logger LOGGER = LoggerFactory + .getLogger(RDFToHTMLProxyPostProcessor.class); + + @Reference + protected TemplateService templateService; + + private Map templatePriorities; + + @Reference + private Resolver rdfResourceResolver; + + @Reference + private CacheManagerService cacheManagerService; + + @SuppressWarnings("unchecked") + public void process(Map config, + Map templateParams, HttpServletResponse response, + ProxyResponse proxyResponse) throws IOException { + try { + String namespaceMapConfig = (String) config + .get(CONFIG_NAMESPACEMAP); + LOGGER.info("Namespace setup {} ", namespaceMapConfig); + + ResolverHolder.set(rdfResourceResolver); + RDFToMap rdfToMap = new RDFToMap(namespaceMapConfig, config); + String key = (String) templateParams.get(config + .get(CONFIG_RESULT_KEY)); + rdfToMap.readMap( + new StringReader(proxyResponse.getResponseBodyAsString())) + .resolveToFullJson(); + if ( cacheManagerService != null ) { + Cache> cache = cacheManagerService + .getCache(RdfResourceResolver.RDFMAPS_CACHE_NAME, + CacheScope.INSTANCE); + rdfToMap.saveCache(cache); + } + Map fullMap = rdfToMap.toMap(); + if (key != null && fullMap.containsKey(key)) { + fullMap.put(RESULT, ImmutableMap + .copyOf((Map) fullMap.get(key))); + } + sendResult(key, config, fullMap, response); + } 
catch (XMLStreamException e) { + LOGGER.error(e.getMessage(), e); + response.sendError(500, + "Failed to parse response from remote server"); + } finally { + ResolverHolder.clear(); + } + + } + + @SuppressWarnings("unchecked") + private void sendResult(String key, Map config, + Map fullMap, HttpServletResponse response) + throws IOException { + String templateName = (String) config.get(CONFIG_FINALTEMPLATE); + String templateNamePattern = (String) config + .get(CONFIG_FINALTEMPLATE_PATTERN); + String templateSelectorProperty = (String) config + .get(CONFIG_FINALTEMPLATE_SELECTOR_PROPERTY); + String templatePrioritiesConfig = (String) config + .get(CONFIG_FINALTEMPLATE_PRIORITIES); + + Builder b = ImmutableMap.builder(); + if (templatePrioritiesConfig != null) { + String[] priorityPairs = StringUtils.split( + templatePrioritiesConfig, ","); + for (String priorityPair : priorityPairs) { + String[] p = StringUtils.split(priorityPair, ":"); + b.put(p[0], Integer.parseInt(p[1])); + } + } + + templatePriorities = b.build(); + if (templateNamePattern != null) { + // get the most significant type, and see of we have a suitable + // template + LOGGER.info("Checking template Pattern {} property {}", + templateNamePattern, templateSelectorProperty); + Map base = fullMap; + if (base.containsKey(RESULT)) { + base = (Map) base.get(RESULT); + } + if (base.containsKey(templateSelectorProperty)) { + Object o = base.get(templateSelectorProperty); + LOGGER.info("Selector is {}", o); + Iterable ov = null; + if ( o instanceof Iterable ) { + ov = (Iterable) o; + } else if ( o instanceof Object[] ) { + ov = Iterables.of((Object[]) o); + } else { + ov = Iterables.of(new Object[]{String.valueOf(o)}); + } + int templatePriority = -1; + for (Object v : ov) { + String oe = String.valueOf(v); + int i = oe.indexOf("#"); + if (i >= 0) { + oe = oe.substring(i + 1); + } + String testTemplateName = MessageFormat.format( + templateNamePattern, oe); + if 
(templateService.checkTemplateExists(testTemplateName)) { + LOGGER.info("Using Template {} ", testTemplateName); + int thisPriority = 0; + if (templatePriorities.containsKey(oe)) { + thisPriority = templatePriorities.get(oe); + } + if (thisPriority > templatePriority) { + templatePriority = thisPriority; + templateName = testTemplateName; + } + } else { + LOGGER.info("Template {} does not exist ", + testTemplateName); + } + } + } + } + if (templateName != null + && !templateService.checkTemplateExists(templateName)) { + throw new IOException("Cant find template " + templateName + + " specified by " + CONFIG_FINALTEMPLATE); + } + LOGGER.info("Template setup {} ", templateName); + String contentType = (String) config.get(CONFIG_CONTENT_TYPE); + String contentEncoding = (String) config.get(CONFIG_CONTENT_ENCODING); + + if (templateName != null) { + LOGGER.info("Rendering with {} as the base of the result map.", key); + if (contentType != null) { + response.setContentType(contentType); + } else { + response.setContentType("application/octet"); + } + if (contentEncoding != null) { + response.setCharacterEncoding(contentEncoding); + } + templateService.process(fullMap, "UTF-8", response.getWriter(), + templateName); + } else { + response.setContentType("application/json"); + response.setCharacterEncoding("UTF-8"); + Gson gson = new GsonBuilder().setPrettyPrinting().registerTypeHierarchyAdapter(Resource.class, new ResourceSerializer()).create(); + if (fullMap.containsKey(RESULT)) { + response.getWriter().write(gson.toJson(fullMap.get(RESULT))); + } else { + response.getWriter().write(gson.toJson(fullMap)); + } + } + } + + public String getName() { + return "RDFToHTMLResolvedJsonProxyPostProcessor"; + } + + public boolean sendCached(Map config, + Map templateParams, HttpServletResponse response) throws IOException { + String cacheKey = (String) config.get("cachekey"); + + if (cacheManagerService != null && cacheKey != null) { + ResolverHolder.set(rdfResourceResolver); + 
try { + String key = (String) templateParams.get(cacheKey); + if (key != null) { + Cache> cache = cacheManagerService + .getCache(RdfResourceResolver.RDFMAPS_CACHE_NAME, + CacheScope.INSTANCE); + if (cache.containsKey(key)) { + Map cachedMap = cache.get(key); + if ( cachedMap != null ) { + Map result = Maps.newHashMap(); + result.put(RESULT, cachedMap); + sendResult(key, config, result, response); + return true; + } + } + } + } finally { + // FIX: was ResolverHolder.set(rdfResourceResolver) — must clear the + // ThreadLocal here, otherwise the resolver leaks into the pooled + // request thread (every other set() in these processors is paired + // with clear() in a finally block). + ResolverHolder.clear(); + } + } + return false; + } + + +} \ No newline at end of file diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RDFToMap.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RDFToMap.java new file mode 100644 index 00000000..4cbd8886 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RDFToMap.java @@ -0,0 +1,350 @@ +package uk.co.tfd.sm.proxy; + +import java.io.Reader; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import javax.xml.namespace.QName; +import javax.xml.stream.EventFilter; +import javax.xml.stream.XMLEventReader; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.events.Attribute; +import javax.xml.stream.events.Characters; +import javax.xml.stream.events.StartElement; +import javax.xml.stream.events.XMLEvent; + +import org.apache.commons.lang.StringUtils; +import org.sakaiproject.nakamura.api.memory.Cache; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +public class RDFToMap { + + private static final String FQ_ABOUT = "http://www.w3.org/1999/02/22-rdf-syntax-ns#about"; + private static final String RDF_NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; + private static final 
QName RDF_RESOURCE = new QName(RDF_NS, "resource"); + private static final QName RDF_ABOUT = new QName(RDF_NS, "about"); + private static final QName RDF_DESCRIPTION = new QName(RDF_NS, + "Description"); + private static final Map> EMPTY_MAP = ImmutableMap.of(); + private XMLInputFactory xmlInputFactory; + private Map nsPrefixMap; + private Map> tripleMap; + private Map resolvedMap; + private Map resolverConfig; + + public RDFToMap(Map nsPrefixMap) { + init(nsPrefixMap); + } + + + public RDFToMap(String namespaceMapConfig, Map resolverConfig) { + String[] pairs = StringUtils.split(namespaceMapConfig, ";"); + Builder b = ImmutableMap.builder(); + if ( pairs != null ) { + for (String pair : pairs) { + String[] kv = StringUtils.split(pair, "=", 2); + if (kv == null || kv.length == 0 ) { + throw new RuntimeException( + "Names space key value pairs must be of the form ns=nsuri;ns=nsuri failed to parse " + + namespaceMapConfig); + } else if ( kv.length == 1) { + b.put(kv[0].trim(),""); + } else { + b.put(kv[1].trim(), kv[0].trim()); + } + } + } + init(b.build()); + this.resolverConfig = resolverConfig; + } + + + private void init(Map nsPrefixMap) { + xmlInputFactory = XMLInputFactory.newInstance(); + xmlInputFactory.setProperty(XMLInputFactory.IS_COALESCING, true); + this.nsPrefixMap = nsPrefixMap; + } + + + public RDFToMap readMap(Reader reader) + throws XMLStreamException { + + XMLEventReader eventReader = xmlInputFactory + .createXMLEventReader(reader); + // Create a filtered reader + XMLEventReader filteredEventReader = xmlInputFactory + .createFilteredReader(eventReader, new EventFilter() { + + public boolean accept(XMLEvent event) { + // Exclude PIs + return (!event.isProcessingInstruction()); + } + }); + + tripleMap = Maps.newHashMap(); + Map currentMap = null; + String key = null; + StringBuilder body = null; + int state = 1; + while (filteredEventReader.hasNext()) { + XMLEvent e = filteredEventReader.nextEvent(); + switch (e.getEventType()) { + case 
XMLEvent.START_ELEMENT: + StartElement startElement = e.asStartElement(); + QName name = startElement.getName(); + if (state == 1 && name.equals(RDF_DESCRIPTION)) { + Attribute desc = startElement.getAttributeByName(RDF_ABOUT); + currentMap = getMap(desc.getValue(), tripleMap); + + state = 2; + } else if (state == 2) { + Attribute resource = startElement + .getAttributeByName(RDF_RESOURCE); + key = name.getNamespaceURI() + name.getLocalPart(); + if (resource != null) { + String value = resource.getValue(); + if ( isDefaultNamespaceURI(value)) { + putMap(currentMap, key, new ResolvableResource(processNamespaceURI(value), resolverConfig)); + } else { + putMap(currentMap, key, + new NonResolvableResource(processNamespaceURI(value))); + } + state = 4; + } else { + body = new StringBuilder(); + state = 3; + } + } + break; + case XMLEvent.END_ELEMENT: + if (state == 2) { + currentMap = null; + state = 1; + } else if (state == 3) { + putMap(currentMap, key, body.toString()); + body = null; + key = null; + state = 2; + } else if (state == 4) { + state = 2; + } + break; + case XMLEvent.CDATA: + case XMLEvent.CHARACTERS: + if (state == 3) { + // accumulate the body + Characters characters = e.asCharacters(); + if (characters.isWhiteSpace() + && !characters.isIgnorableWhiteSpace() + || !characters.isWhiteSpace()) { + body.append(characters.getData()); + } + + } + break; + } + } + return this; + } + + public RDFToMap resolveToFullJson() { + Set resolving = Sets.newHashSet(); + resolvedMap = Maps.newHashMap(); + resolveToFullJson(resolvedMap, EMPTY_MAP, tripleMap, resolving); + Map invertedNsPrefixMap = invertMap(nsPrefixMap); + accumulate(resolvedMap, "_namespaces", invertedNsPrefixMap); + accumulate(resolvedMap, "_default", invertedNsPrefixMap.get("")); + return this; + } + + + + + public String toJson(boolean indented) { + if (indented ) { + Gson gson = new GsonBuilder().setPrettyPrinting().registerTypeHierarchyAdapter(Resource.class, new ResourceSerializer()).create(); + 
return gson.toJson(resolvedMap); + } else { + Gson gson = new GsonBuilder().registerTypeHierarchyAdapter(Resource.class, new ResourceSerializer()).create(); + return gson.toJson(resolvedMap); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private void accumulate(Map output, String key, + Object value) { + if (output.containsKey(key)) { + Object o = output.get(key); + if ( o instanceof List) { + ((List) o).add(value); + } else { + output.put(key, Lists.newArrayList(o, value)); + } + } else { + output.put(key, value); + } + } + + private Map invertMap(Map map) { + Builder b = ImmutableMap.builder(); + for ( Entry e : map.entrySet()) { + b.put(e.getValue(), e.getKey()); + } + return b.build(); + } + + private void resolveToFullJson(Map output, + Map> baseMap, Map m, Set resolving) { + for (Entry e : m.entrySet()) { + resolveValueToFullJson(e.getKey(), e.getValue(), output, baseMap, m, resolving); + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private void resolveValueToFullJson(String k, Object v, + Map output, + Map> baseMap, Map m, + Set resolving) { + String resourceRef = getResourceRef(v); + if (v instanceof Map) { +// LOGGER.info("Did not resolve {} adding Map ", k); + Map nobj = Maps.newHashMap(); + accumulate(output, k, nobj); + accumulate(nobj, processNamespaceURI(FQ_ABOUT), k); + resolveToFullJson(nobj, baseMap, (Map) v, resolving); + } else if (v instanceof Set) { +// LOGGER.info("Did not resolve {} adding Set ", k); + List resolvedInSet = Lists.newArrayList(); + int i = 0; + for (Object ov : (Set) v) { + String key = getResourceRef(ov); + if (key != null && !resolving.contains(key)) { + resolvedInSet.add(i, key); + resolving.add(key); + } else { + resolvedInSet.add(i, null); + } + i++; + } + i = 0; + for (Object ov : (Set) v) { + if (resolvedInSet.get(i) != null) { + resolving.remove(resolvedInSet.get(i)); + } + resolveValueToFullJson(k, ov, output, baseMap, m, resolving); + i++; + } + } else if (resourceRef != null) { + if 
(!resolving.contains(resourceRef) + && baseMap.containsKey(resourceRef) + && baseMap.get(resourceRef) instanceof Map) { +// LOGGER.info("Resolved and Accumunated {} ", resourceRef); + Map nobj = Maps.newHashMap(); + accumulate(output, k, nobj); + accumulate(nobj, processNamespaceURI(FQ_ABOUT), resourceRef); + resolving.add(resourceRef); + resolveToFullJson(nobj, baseMap, baseMap.get(resourceRef), + resolving); + resolving.remove(resourceRef); + } else { +// LOGGER.info("Did not resolve {} adding String {} ", k, v); + accumulate(output, k, v); + } + } else { +// LOGGER.info("Did not resolve {} adding Object ", k); + accumulate(output, k, v); + } + } + + private String getResourceRef(Object ov) { + if ( ov instanceof String && ((String) ov).startsWith("rdf:resource:")) { + return ((String) ov).substring("rdf:resource:".length()); + } + return null; + } + + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private void putMap(Map map, String keyWithNamespace, Object value) { + String key = processNamespaceURI(keyWithNamespace); + if (map.containsKey(key)) { + Object o = map.get(key); + if (o instanceof Set) { + ((Set) o).add(value); + } else { + map.put(key, Sets.newHashSet(o, value)); + } + } else { + map.put(key, value); + } + + } + + private Map getMap(String keyWithNamespace, + Map> tripleMap) { + String key = processNamespaceURI(keyWithNamespace); + if (tripleMap.containsKey(key)) { +// LOGGER.info("Map for {} already exists ", key); + return tripleMap.get(key); + + } else { + Map m = Maps.newHashMap(); + tripleMap.put(key, m); + return m; + } + } + + private boolean isDefaultNamespaceURI(String keyWithNamespace) { + for ( Entry e : nsPrefixMap.entrySet() ) { + if ( keyWithNamespace.startsWith(e.getKey())) { + String ns = e.getValue(); + if ( ns.length() > 0 ) { + return false; + } else { + return true; + } + } + } + return false; + } + + private String processNamespaceURI(String keyWithNamespace) { + for ( Entry e : nsPrefixMap.entrySet() ) { + if ( 
keyWithNamespace.startsWith(e.getKey())) { + String ns = e.getValue(); + if ( ns.length() > 0 ) { + return ns+"_"+keyWithNamespace.substring(e.getKey().length()); + } else { + return keyWithNamespace.substring(e.getKey().length()); + } + } + } + return keyWithNamespace; + } + + + public Map toMap() { + return resolvedMap; + } + + + public void saveCache(Cache> cache) { + for ( Entry> e : tripleMap.entrySet()) { + Object o = e.getValue(); + if ( o instanceof Map) { + cache.put(e.getKey(), e.getValue()); + } + } + } + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RDFToResolvedJsonProxyPostProcessor.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RDFToResolvedJsonProxyPostProcessor.java new file mode 100644 index 00000000..9ddb6262 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RDFToResolvedJsonProxyPostProcessor.java @@ -0,0 +1,102 @@ +package uk.co.tfd.sm.proxy; + +import java.io.IOException; +import java.io.StringReader; +import java.util.Map; + +import javax.servlet.http.HttpServletResponse; +import javax.xml.stream.XMLStreamException; + +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.memory.Cache; +import org.sakaiproject.nakamura.api.memory.CacheManagerService; +import org.sakaiproject.nakamura.api.memory.CacheScope; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.proxy.ProxyPostProcessor; +import uk.co.tfd.sm.api.proxy.ProxyResponse; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +@Component(immediate = true, metatype = true) +@Service(value = ProxyPostProcessor.class) +public class RDFToResolvedJsonProxyPostProcessor implements ProxyPostProcessor { + + private static final Logger LOGGER = LoggerFactory + .getLogger(RDFToResolvedJsonProxyPostProcessor.class); + + public static final String 
CONFIG_NAMESPACEMAP = "namespacemap"; + + @Reference + private Resolver rdfResourceResolver; + + @Reference + private CacheManagerService cacheManagerService; + + public void process(Map config, + Map templateParams, HttpServletResponse response, + ProxyResponse proxyResponse) throws IOException { + String namespaceMapConfig = (String) templateParams + .get(CONFIG_NAMESPACEMAP); + try { + ResolverHolder.set(rdfResourceResolver); + RDFToMap rdfToMap = new RDFToMap(namespaceMapConfig, config); + response.getWriter().append( + rdfToMap.readMap( + new StringReader(proxyResponse + .getResponseBodyAsString())) + .resolveToFullJson().toJson(false)); + if (cacheManagerService != null) { + Cache> cache = cacheManagerService + .getCache(RdfResourceResolver.RDFMAPS_CACHE_NAME, + CacheScope.INSTANCE); + rdfToMap.saveCache(cache); + } + } catch (XMLStreamException e) { + LOGGER.error(e.getMessage(), e); + response.sendError(500, + "Failed to parse response from remote server"); + } finally { + ResolverHolder.clear(); + } + + } + + public boolean sendCached(Map config, + Map templateParams, HttpServletResponse response) + throws IOException { + String cacheKey = (String) config.get("cachekey"); + if (cacheManagerService != null && cacheKey != null) { + String key = (String) templateParams.get(cacheKey); + if (key != null) { + ResolverHolder.set(rdfResourceResolver); + try { + Cache> cache = cacheManagerService + .getCache(RdfResourceResolver.RDFMAPS_CACHE_NAME, + CacheScope.INSTANCE); + if (cache.containsKey(key)) { + Gson gson = new GsonBuilder() + .setPrettyPrinting() + .registerTypeHierarchyAdapter(Resource.class, + new ResourceSerializer()).create(); + response.getWriter().write( + gson.toJson(cache.get(cacheKey))); + return true; + } + } finally { + ResolverHolder.clear(); + } + } + } + return false; + } + + public String getName() { + return "RDFToResolvedJsonProxyPostProcessor"; + } + +} diff --git 
a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RdfResourceResolver.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RdfResourceResolver.java new file mode 100644 index 00000000..61243945 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/RdfResourceResolver.java @@ -0,0 +1,97 @@ +package uk.co.tfd.sm.proxy; + +import java.io.IOException; +import java.io.StringReader; +import java.util.Map; + +import javax.xml.stream.XMLStreamException; + +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Deactivate; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.memory.Cache; +import org.sakaiproject.nakamura.api.memory.CacheManagerService; +import org.sakaiproject.nakamura.api.memory.CacheScope; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.proxy.ProxyClientException; +import uk.co.tfd.sm.api.proxy.ProxyClientService; +import uk.co.tfd.sm.api.proxy.ProxyResponse; + +import com.google.common.collect.ImmutableMap; + +@Component(immediate = true, metatype = true) +@Service(value = Resolver.class) +public class RdfResourceResolver implements Resolver { + + public static final String RDFMAPS_CACHE_NAME = "rdfmaps"; + + private static final Map EMPTY_MAP = ImmutableMap.of(); + + private static final Logger LOGGER = LoggerFactory + .getLogger(RdfResourceResolver.class); + + @Reference + private ProxyClientService proxyClientService; + + @Reference + private CacheManagerService cacheManagerService; + + @Activate + protected void activate(Map properties) { + Cache> cache = cacheManagerService.getCache(RDFMAPS_CACHE_NAME, CacheScope.INSTANCE); + cache.clear(); + } + + @Deactivate + protected void deactivate(Map properties) { + Cache> cache = cacheManagerService.getCache(RDFMAPS_CACHE_NAME, CacheScope.INSTANCE); + cache.clear(); + } + + + 
@SuppressWarnings("unchecked") + public Map get(String key, + Map resolverConfig) throws IOException { + Cache> cache = cacheManagerService.getCache(RDFMAPS_CACHE_NAME, CacheScope.INSTANCE); + if ( cache.containsKey(key)) { + return cache.get(key); + } + String targetPath = (String) resolverConfig.get("resolver-endpoint"); + if ( targetPath == null ) { + targetPath = (String) resolverConfig.get(ProxyClientService.CONFIG_REQUEST_PROXY_ENDPOINT); + } + try { + ProxyResponse proxyResponse = proxyClientService.executeCall( + ImmutableMap.of( + ProxyClientService.CONFIG_REQUEST_PROXY_ENDPOINT, + (Object) targetPath, + ProxyClientService.CONFIG_REQUEST_PROXY_METHOD, + "GET"), EMPTY_MAP, ImmutableMap.of("vid", + (Object) key), null, -1L, null); + String namespaceMapConfig = (String) resolverConfig + .get(RDFToHTMLProxyPostProcessor.CONFIG_NAMESPACEMAP); + RDFToMap rdfToMap = new RDFToMap(namespaceMapConfig, resolverConfig); + rdfToMap.readMap( + new StringReader(proxyResponse.getResponseBodyAsString())) + .resolveToFullJson(); + rdfToMap.saveCache(cache); + Map fullMap = rdfToMap.toMap(); + if (key != null && fullMap.containsKey(key)) { + fullMap = ImmutableMap.copyOf((Map) fullMap + .get(key)); + } + cache.put(key, fullMap); + return fullMap; + } catch (ProxyClientException e) { + LOGGER.warn(e.getMessage(), e); + } catch (XMLStreamException e) { + LOGGER.warn(e.getMessage(), e); + } + return EMPTY_MAP; + } + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResolvableResource.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResolvableResource.java new file mode 100644 index 00000000..5b1acbdb --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResolvableResource.java @@ -0,0 +1,68 @@ +package uk.co.tfd.sm.proxy; + +import java.io.IOException; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; + +public class ResolvableResource implements Resource 
{ + + private static final Logger LOGGER = LoggerFactory.getLogger(ResolvableResource.class); + private String value; + private String label; + private Map resolverConfig; + private Map target; + public ResolvableResource(String value, Map resolverConfig) { + this.value = value; + this.resolverConfig = resolverConfig; + LOGGER.info("Resolvable Resource Created for {} ",value); + } + + + public synchronized String getLabel() throws IOException { + if ( label == null ) { + Map tm = getTarget(); + if ( tm != null ) { + label = (String) tm.get("rdfs_label"); + } + } + LOGGER.info("Got Lable as {} ",label); + return label; + } + + public synchronized Map getTarget() throws IOException { + if ( target == null ) { + Resolver resolver = ResolverHolder.get(); + if ( resolver == null ) { + LOGGER.info("Resolver was null"); + return ImmutableMap.of("rdfs_label",(Object)(" No Resolver for "+value)); + } else { + target = resolver.get(value, resolverConfig); + } + } + return target; + } + + public String getKey() { + return value; + } + + public boolean hasLabelAndKey() { + LOGGER.info("Checking Label and Key on {} ",value); + return true; + } + + public boolean isReference() { + LOGGER.info("Checking Reference and Key on {} ",value); + return true; + } + + @Override + public String toString() { + return value; + } + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Resolver.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Resolver.java new file mode 100644 index 00000000..9dbd7b77 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Resolver.java @@ -0,0 +1,10 @@ +package uk.co.tfd.sm.proxy; + +import java.io.IOException; +import java.util.Map; + +public interface Resolver { + + Map get(String key, Map resolverConfig) throws IOException; + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResolverHolder.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResolverHolder.java new file mode 100644 index 
00000000..e4391892 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResolverHolder.java @@ -0,0 +1,19 @@ +package uk.co.tfd.sm.proxy; + +public class ResolverHolder { + + private static ThreadLocal holder = new ThreadLocal(); + + public static Resolver get() { + return holder.get(); + } + + public static void set(Resolver resolver) { + holder.set(resolver); + } + + public static void clear() { + holder.set(null); + } + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Resource.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Resource.java new file mode 100644 index 00000000..d71b74ce --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Resource.java @@ -0,0 +1,5 @@ +package uk.co.tfd.sm.proxy; + +public interface Resource { + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResourceSerializer.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResourceSerializer.java new file mode 100644 index 00000000..ab2d459a --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/ResourceSerializer.java @@ -0,0 +1,17 @@ +package uk.co.tfd.sm.proxy; + +import java.lang.reflect.Type; + +import com.google.gson.JsonElement; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +public class ResourceSerializer implements JsonSerializer { + + public JsonElement serialize(Resource resource, Type tupeOfSrc, + JsonSerializationContext context) { + return new JsonPrimitive(resource.toString()); + } + +} diff --git a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Signature.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Signature.java new file mode 100644 index 00000000..ae20a9e7 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/Signature.java @@ -0,0 +1,62 @@ +package uk.co.tfd.sm.proxy; + +import java.io.UnsupportedEncodingException; +import java.security.InvalidKeyException; +import 
java.security.NoSuchAlgorithmException; +import java.security.SignatureException; + +import javax.crypto.Mac; +import javax.crypto.spec.SecretKeySpec; + +import org.apache.commons.codec.binary.Base64; + +public class Signature { + + private static final String HMAC_SHA1_ALGORITHM = "HmacSHA1"; + + /** + * Calculate an RFC2104 compliant HMAC (Hash-based Message Authentication Code) + * + * @param data + * The data to be signed. This data is pushed through a hex converter in this + * method, so there is no need to do this before generating the HMAC. + * @param key + * The signing key. + * @return The Base64-encoded RFC 2104-compliant HMAC signature. + * @throws java.security.SignatureException + * when signature generation fails + */ + + public static String calculateRFC2104HMAC(String data, String key) + throws java.security.SignatureException { + if (data == null) { + throw new IllegalArgumentException("String data == null"); + } + if (key == null) { + throw new IllegalArgumentException("String key == null"); + } + try { + // Get an hmac_sha1 key from the raw key bytes + byte[] keyBytes = key.getBytes("UTF-8"); + SecretKeySpec signingKey = new SecretKeySpec(keyBytes, HMAC_SHA1_ALGORITHM); + + // Get an hmac_sha1 Mac instance and initialize with the signing key + Mac mac = Mac.getInstance(HMAC_SHA1_ALGORITHM); + mac.init(signingKey); + + // Compute the hmac on input data bytes + byte[] rawHmac = mac.doFinal(data.getBytes("UTF-8")); + + // Convert raw bytes to encoding + return Base64.encodeBase64URLSafeString(rawHmac); + + } catch (NoSuchAlgorithmException e) { + throw new SignatureException("Failed to generate HMAC : " + e.getMessage(), e); + } catch (InvalidKeyException e) { + throw new SignatureException("Failed to generate HMAC : " + e.getMessage(), e); + } catch (UnsupportedEncodingException e) { + throw new SignatureException("Failed to generate HMAC : " + e.getMessage(), e); + } + } + +} diff --git 
a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPostProcessor.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPostProcessor.java new file mode 100644 index 00000000..20e49999 --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPostProcessor.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy; + +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Properties; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.proxy.ProxyPostProcessor; +import uk.co.tfd.sm.api.proxy.ProxyResponse; + +import java.io.IOException; +import java.util.Map; + +import javax.servlet.http.HttpServletResponse; + +/** + * Will verify two required templateParams (i.e. hostname and + * port) and then process. + * + * 1) Added a new PostProcessor called TrustedLoginTokenProxyPostProcessor + * and added it to the list of trusted PostProcessors in + * ProxyClientServiceImpl.
    + * 2) TrustedLoginTokenProxyPostProcessor (i.e. " + * trustedLoginTokenProxyPostProcessor") is now bound to the two proxy nodes: + * a) + * bundles/proxy/src/main/resources/SLING-INF/content/var/proxy/s23/site.json + * and b) + * bundles/proxy/src/main/resources/SLING-INF/content/var/proxy/s23/sites.json + * .
    + * 3) TrustedLoginTokenProxyPostProcessor now verifies that the ${hostname} + * and ${port} values are required and MUST match the values for these two variables as + * defined in the PreProcessor: TrustedLoginTokenProxyPreProcessor. + * + * This should prevent any shenanigans with this particular proxy. + */ +@Service +@Component +@Properties(value = { + @Property(name = "service.vendor", value = "The Sakai Foundation"), + @Property(name = "service.description", value = "Will verify two required templateParams (i.e. hostname and port) and then process.") }) +public class TrustedLoginTokenProxyPostProcessor implements ProxyPostProcessor { + public static final String NAME = "trustedLoginTokenProxyPostProcessor"; + protected transient DefaultProxyPostProcessorImpl dpppi = new DefaultProxyPostProcessorImpl(); + + @Reference + protected transient TrustedLoginTokenProxyPreProcessor tltppp; + + private static final Logger LOG = LoggerFactory + .getLogger(TrustedLoginTokenProxyPostProcessor.class); + + /** + * {@inheritDoc} + * + * @see uk.co.tfd.sm.api.proxy.ProxyPostProcessor#process(java.util.Map, + * org.apache.sling.api.SlingHttpServletResponse, + * uk.co.tfd.sm.api.proxy.ProxyResponse) + */ + public void process(Map config, Map templateParams, + HttpServletResponse response, ProxyResponse proxyResponse) throws IOException { + LOG.debug( + "process(Map {}, SlingHttpServletResponse response, ProxyResponse proxyResponse)", + templateParams); + if (templateParams == null || !tltppp.hostname.equals(templateParams.get("hostname")) + || tltppp.port != (Integer) templateParams.get("port")) { + response.sendError(HttpServletResponse.SC_BAD_REQUEST); + return; + } + // just use DefaultProxyPostProcessorImpl behavior + dpppi.process(config, templateParams, response, proxyResponse); + return; + } + + /** + * {@inheritDoc} + * + * @see uk.co.tfd.sm.api.proxy.ProxyPostProcessor#getName() + */ + public String getName() { + return NAME; + } + +} diff --git 
a/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPreProcessor.java b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPreProcessor.java new file mode 100644 index 00000000..4bdd19fe --- /dev/null +++ b/extensions/proxy/src/main/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPreProcessor.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy; + +import java.security.SignatureException; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Properties; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Service; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.proxy.ProxyPreProcessor; + +/** + * This pre processor adds a header to the proxy request that is picked up by + * the far end to identify the users. The far end has to a) share the same + * shared token and b) have something to decode the token. 
The class was + * originally designed to work with a TrustedTokenLoginFilter for Sakai 2, but + * the handshake protocol is so simple it could be used with any end point. + * There is one configuration item, the sharedSecret that must match the far + * end. At the moment this component is configured to be a singleton service but + * if this mechanism of authenticating proxies becomes wide spread we may want + * this class to be come a service factory so that we can support many trust + * relationships. + * + */ +@Service(value = ProxyPreProcessor.class) +@Component(metatype = true, immediate = true) +@Properties(value = { + @Property(name = "service.description", value = { "Pre processor for proxy requests to Sakai 2 instance with a trusted token filter." }), + @Property(name = "service.vendor", value = { "The Sakai Foundation" }) }) +public class TrustedLoginTokenProxyPreProcessor implements ProxyPreProcessor { + + public static final String SECURE_TOKEN_HEADER_NAME = "x-sakai-token"; + public static final String TOKEN_SEPARATOR = ";"; + + private static final Logger LOGGER = LoggerFactory + .getLogger(TrustedLoginTokenProxyPreProcessor.class); + + @Property(name = "sharedSecret") + private String sharedSecret = "e2KS54H35j6vS5Z38nK40"; + + @Property(name = "port", intValue = 80) + protected int port; + + @Property(name = "hostname", value = { "localhost" }) + protected String hostname; + + public String getName() { + return "trusted-token"; + } + + public void preProcessRequest(HttpServletRequest request, + Map headers, Map templateParams) { + + String user = request.getRemoteUser(); + String hmac; + final String message = user + TOKEN_SEPARATOR + + System.currentTimeMillis(); + try { + hmac = Signature.calculateRFC2104HMAC(message, sharedSecret); + } catch (SignatureException e) { + LOGGER.error(e.getLocalizedMessage(), e); + throw new Error(e); + } + final String token = hmac + TOKEN_SEPARATOR + message; + headers.put(SECURE_TOKEN_HEADER_NAME, token); + + 
templateParams.put("port", port); + templateParams.put("hostname", hostname); + } + + /** + * When the bundle gets activated we retrieve the OSGi properties. + * + * @param context + */ + protected void activate(Map props) { + // Get the properties from the console. + sharedSecret = toString(props.get("sharedSecret"), + "e2KS54H35j6vS5Z38nK40"); + hostname = toString(props.get("hostname"), "localhost"); + LOGGER.info(" Trusted hostname: " + hostname); + port = toInteger(props.get("port"), 80); + LOGGER.info("Trusted port: " + port); + } + + private int toInteger(Object object, int defaultValue) { + if ( object == null ) { + return defaultValue; + } + return Integer.parseInt(String.valueOf(object)); + } + + private String toString(Object object, String defaultValue) { + if (object == null) { + return defaultValue; + } + return String.valueOf(object); + } + +} diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/bookmarkandshare/popular b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/bookmarkandshare/popular new file mode 100644 index 00000000..6b275ddc --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/bookmarkandshare/popular @@ -0,0 +1,3 @@ +request-proxy-endpoint = http://www.addthis.com/services/trends-load/format/json?cat=&cnt=10 +request-proxy-method = GET +# Get the most popular services from Add This diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/delicious/bookmarks b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/delicious/bookmarks new file mode 100644 index 00000000..1b347c80 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/delicious/bookmarks @@ -0,0 +1,8 @@ +request-proxy-endpoint = http://feeds.delicious.com/v2/json/${mode}?count=${count} +request-proxy-method = GET +# Get bookmarks from the Delicious JSON feed. +# parameters +# mode The correct mode of which the bookmarks should be obtained. 
+# This is a part of the proxy endpoint and contains a username whenever needed +# (modes: personal, network, subscriptions). +# count The number of bookmarks to fetch. This is a part of the proxy endpoint. \ No newline at end of file diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetFriendsFromUser b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetFriendsFromUser new file mode 100644 index 00000000..8f9fcf78 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetFriendsFromUser @@ -0,0 +1,8 @@ +request-proxy-endpoint = http://api.flickr.com/services/rest/?method=flickr.contacts.getPublicList&api_key=${config.flickr_api_key}&user_id=${userid}&page=1&per_page=15&format=json&nojsoncallback=1 +request-proxy-method = GET +# flickr- Get the public contacts from a user +# This proxy will get the public contacts from a user. +# This template requires an API key. +# It can be defined by configuring org.sakaiproject.nakamura.proxy.ProxyClientServiceImpl flickr_api_key=93a86c06dc382a19bff0d4d24872ecab +# parameters +# userid (Required) The user id diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetPhotoInfo b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetPhotoInfo new file mode 100644 index 00000000..dc389d84 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetPhotoInfo @@ -0,0 +1,2 @@ +request-proxy-endpoint = http://api.flickr.com/services/rest/?method=flickr.photos.getInfo&api_key=${config.flickr_api_key}&photo_id=${photoId}&format=json&nojsoncallback=1 +request-proxy-method = GET diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetPicturesByUserId b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetPicturesByUserId new file mode 100644 index 00000000..9f06e629 --- /dev/null +++ 
b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetPicturesByUserId @@ -0,0 +1,7 @@ +request-proxy-endpoint = http://api.flickr.com/services/rest/?method=flickr.people.getPublicPhotos&api_key=${config.flickr_api_key}&user_id=${userid}&safe_search=true&per_page=${per_page}&page=${page}&format=json&nojsoncallback=1 +request-proxy-method = GET +# flickr- Get the public pictures from a user +# Parameters +# userid (Required)The user id +# page (Required)The page +# per_page (Required)Images per page diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetUserDetailsByEMail b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetUserDetailsByEMail new file mode 100644 index 00000000..7f5eab8b --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetUserDetailsByEMail @@ -0,0 +1,4 @@ +request-proxy-endpoint = http://api.flickr.com/services/rest/?method=flickr.people.findByEmail&api_key=${config.flickr_api_key}&find_email=${email}&format=json&nojsoncallback=1 +request-proxy-method = GET +# flickr- Get user-id by e-mail +# e-mail (Required)The e-mail address key diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetUserDetailsByName b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetUserDetailsByName new file mode 100644 index 00000000..96135653 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrGetUserDetailsByName @@ -0,0 +1,4 @@ +request-proxy-endpoint = http://api.flickr.com/services/rest/?method=flickr.people.findByUsername&api_key=${config.flickr_api_key}&username=${name}&format=json&nojsoncallback=1 +request-proxy-method = GET +# flickr- Get user-id by name +# name (Required)The name \ No newline at end of file diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrKeyPictures 
b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrKeyPictures new file mode 100644 index 00000000..09afc751 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/flickr/flickrKeyPictures @@ -0,0 +1,8 @@ +request-proxy-endpoint = http://api.flickr.com/services/rest?method=flickr.photos.search&api_key=${config.flickr_api_key}&page=${page}&media=${media}&per_page=${per_page}&tags=${tags}&format=json&nojsoncallback=1 +request-proxy-method = GET +# flickr- Get pictures by searchterm", +# parameters +# media (Required)Set the kind of media that will be returned +# per_page (Required)Set the images that will be returned per page +# tags (Required)All the searchterms +# page (Required)The requested page diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/google/picasaGetPhoto b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/google/picasaGetPhoto new file mode 100644 index 00000000..9f3a1465 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/google/picasaGetPhoto @@ -0,0 +1,7 @@ +request-proxy-endpoint = https://picasaweb.google.com/data/feed/base/user/${userId}/album/${albumName}/photoid/${photoId}?alt=json +request-proxy-method = GET +# Get an image from Picasa. 
+# Parameters +# userId the Picasa userId +# albumName the Picasa albumName +# photoId the Picasa photoId diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/header b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/header new file mode 100644 index 00000000..498b4266 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/header @@ -0,0 +1,3 @@ +request-proxy-endpoint = ${url} +request-proxy-method = HEAD +postprocessor = header diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/rss b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/rss new file mode 100644 index 00000000..c9933d8b --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/rss @@ -0,0 +1,5 @@ +proxy-limit-length = 102400 +request-proxy-endpoint = ${rss} +request-proxy-method = GET +postprocessor = rss +preprocessor = rss \ No newline at end of file diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/site b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/site new file mode 100644 index 00000000..6328628d --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/site @@ -0,0 +1,9 @@ +request-proxy-endpoint = http://${hostname}:${port}/sakai-hybrid/site?siteId=${siteid} +request-proxy-method = GET +preprocessor = trusted-token +postprocessor = trustedLoginTokenProxyPostProcessor +# Sakai2: get site information +# Parameters +# hostname the Sakai 2 host name +# port the Sakai 2 port +# siteid the Site ID diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sites b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sites new file mode 100644 index 00000000..e28989d6 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sites @@ -0,0 +1,9 @@ +request-proxy-endpoint = http://${hostname}:${port}/sakai-hybrid/sites +request-proxy-method = GET +preprocessor = trusted-token +postprocessor = trustedLoginTokenProxyPostProcessor +# 
Get the sites from Sakai2 for the current logged in user. +# Parameters +# hostname the Sakai 2 host name +# port the Sakai 2 port + diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesCategorized b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesCategorized new file mode 100644 index 00000000..5b15f0a9 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesCategorized @@ -0,0 +1,9 @@ +request-proxy-endpoint = http://${hostname}:${port}/sakai-hybrid/sites?categorized=${categorized} +request-proxy-method = GET +preprocessor = trusted-token +postprocessor = trustedLoginTokenProxyPostProcessor +# Get the sites from Sakai2 for the current logged in user. +# hostname the Sakai 2 host name +# port the Sakai 2 port +# categorized how the sites are going to be categorized + diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesDebug b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesDebug new file mode 100644 index 00000000..554627c1 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesDebug @@ -0,0 +1,8 @@ +request-proxy-endpoint = http://${hostname}:${port}/sakai-hybrid/sites?categorized=${categorized}&unread=${unread}&l=${l} +request-proxy-method = GET +preprocessor = trusted-token +postprocessor = trustedLoginTokenProxyPostProcessor +# Get the sites from Sakai2 for the current logged in user. 
+# hostname the Sakai 2 host name +# port the Sakai 2 port +# categorized how the sites are going to be categorized diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesUnread b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesUnread new file mode 100644 index 00000000..fdeaa266 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/s23/sitesUnread @@ -0,0 +1,4 @@ +request-proxy-endpoint = http://${hostname}:${port}/sakai-hybrid/sites?unread=${unread} +request-proxy-method = GET +preprocessor = trusted-token +postprocessor = trustedLoginTokenProxyPostProcessor \ No newline at end of file diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/slideshare/slideshareGetSlideshow b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/slideshare/slideshareGetSlideshow new file mode 100644 index 00000000..b72adb60 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/slideshare/slideshareGetSlideshow @@ -0,0 +1,10 @@ +request-proxy-endpoint = http://www.slideshare.net/api/2/get_slideshow?${keyHash}&slideshow_url=${slideshow_url} +preprocessor = slideshare +request-proxy-method = GET +# Get a slide show from Slideshare.", +# This template requires an API key. 
It can be defined at +# org.sakaiproject.nakamura.proxy.SlideshareProxyPreProcessor +# slideshare.apiKey=0XXXyyy +# slideshare.sharedSecret=XXXXyyyX +# Parameters +# slideshow_url The url of the slideshow diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/twitter/status b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/twitter/status new file mode 100644 index 00000000..bb32a922 --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/twitter/status @@ -0,0 +1,5 @@ +request-proxy-endpoint = http://twitter.com/statuses/user_timeline/${user}.json?page=1 +request-proxy-method = GET +# Proxies Get status to the twitter API as GET http://localhost:8080/var/proxy/twitter/status.json?user=_ieb_ + + diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/twitter/update_status b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/twitter/update_status new file mode 100644 index 00000000..4d3e619a --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/twitter/update_status @@ -0,0 +1,9 @@ +request-proxy-endpoint = http://twitter.com/statuses/update.json +request-proxy-method = POST +request-content-type = application/x-www-form-urlencoded +# Proxies Update status to the twitter API as POST http://localhost:8080/var/proxy/twitter/update_status.json +# The body of the post must conform to the Twitter API for update status, as per +# password it should be supplied as in parameters as :basic-user, :basic-password +# all other parameters will be proxied through to Twitter + + diff --git a/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/videolectures/videoLecturesGetSnippet b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/videolectures/videoLecturesGetSnippet new file mode 100644 index 00000000..b693836a --- /dev/null +++ b/extensions/proxy/src/main/resources/uk/co/tfd/sm/proxy/videolectures/videoLecturesGetSnippet @@ -0,0 +1,3 @@ +request-proxy-endpoint = 
http://videolectures.net/${lectureId}/snippet/ +request-proxy-method = GET +# Get the snippet page for a lecture diff --git a/extensions/proxy/src/test/config/tests/test1 b/extensions/proxy/src/test/config/tests/test1 new file mode 100644 index 00000000..00257166 --- /dev/null +++ b/extensions/proxy/src/test/config/tests/test1 @@ -0,0 +1,8 @@ +preprocessor = Default +postprocessor = Default +request-proxy-endpoint = http://localhost:8080/test +request-proxy-method = GET +proxy-limit-length = 100 +proxy-request-template = a request body template +request-content-type = text/plain +proxy-header = X-test-proxy:1,X-test-proxy-2:2 \ No newline at end of file diff --git a/extensions/proxy/src/test/config/tests/vivo b/extensions/proxy/src/test/config/tests/vivo new file mode 100644 index 00000000..792d127b --- /dev/null +++ b/extensions/proxy/src/test/config/tests/vivo @@ -0,0 +1,6 @@ +preprocessor = Default +postprocessor = Default +request-proxy-endpoint = http://localhost:8080/vivo/individual/${id}/${id}.rdf +request-proxy-method = GET +# this item is in the public cache with an expiry time of 200s +request-cache-expiry = 200 \ No newline at end of file diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/DefaultProxyPostProcessorImplTest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/DefaultProxyPostProcessorImplTest.java new file mode 100644 index 00000000..6528914b --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/DefaultProxyPostProcessorImplTest.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletResponse; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + +import uk.co.tfd.sm.api.proxy.ProxyResponse; + + +/** + * + */ +@RunWith(MockitoJUnitRunner.class) +public class DefaultProxyPostProcessorImplTest { + + @Mock + private HttpServletResponse response; + + @Mock + private ProxyResponse proxyResponse; + + private InputStream proxyResponseInputStream; + + @Mock + private ServletOutputStream responseOutputStream; + + private Map responseHeaders; + + private DefaultProxyPostProcessorImpl proxyPostProcessor; + + @Before + public void setup() throws Exception { + proxyPostProcessor = new DefaultProxyPostProcessorImpl(); + responseHeaders = new HashMap(); + putInSomeCannedHeaders(responseHeaders); + proxyResponseInputStream = new ByteArrayInputStream("Hello, world.".getBytes("UTF-8")); + } + + private void putInSomeCannedHeaders(Map headerMap) { + headerMap.put("Date", new String[] {"Wed, 24 Feb 2010 17:11:12 GMT"}); + headerMap.put("Server", new String[] {"Chunked Update Server"}); + headerMap.put("X-XSS-Protection", new 
String[] {"0"}); + headerMap.put("Cache-Control", new String[] {"public,max-age=21600"}); + headerMap.put("Content-Type", new String[] {"application/vnd.google.safebrowsing-chunk"}); + headerMap.put("Content", new String[] {"public,max-age=21600"}); + headerMap.put("Content-Length", new String[] {"3233"}); + headerMap.put("Age", new String[] {"19"}); + } + + @Test + public void responseHasAllHeadersFromProxyResponse() throws Exception { + //given + proxyResponseCanReturnMapOfHeaders(); + proxyResponseCanReturnBodyAsInputStream(); + slingResponseCanReturnOutputStream(); + + //when + proxyPostProcessor.process(null, null, response, proxyResponse); + + //then + for (Entry proxyResponseHeader : proxyResponse.getResponseHeaders().entrySet()) { + for (String value : proxyResponseHeader.getValue()) { + verify(response).setHeader(proxyResponseHeader.getKey(), value); + } + } + } + private void slingResponseCanReturnOutputStream() throws IOException { + when(response.getOutputStream()).thenReturn(responseOutputStream); + } + + private void proxyResponseCanReturnBodyAsInputStream() throws IOException { + when(proxyResponse.getResponseBodyAsInputStream()).thenReturn(proxyResponseInputStream); + } + + private void proxyResponseCanReturnMapOfHeaders() { + when(proxyResponse.getResponseHeaders()).thenReturn(responseHeaders); + } + + @Test + public void nameIsAsExpected() { + assertEquals("default", proxyPostProcessor.getName()); + } + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyClientServiceImplTest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyClientServiceImplTest.java new file mode 100644 index 00000000..f22008f6 --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyClientServiceImplTest.java @@ -0,0 +1,321 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.Map; + +import junit.framework.Assert; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import uk.co.tfd.sm.api.proxy.ProxyClientException; +import uk.co.tfd.sm.api.proxy.ProxyClientService; +import uk.co.tfd.sm.api.proxy.ProxyResponse; +import uk.co.tfd.sm.proxy.http.CapturedRequest; +import uk.co.tfd.sm.proxy.http.DummyServer; +import uk.co.tfd.sm.template.TemplateServiceImpl; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.Maps; + + +/** + * + */ +public class ProxyClientServiceImplTest { + + /** + * + */ + private static final String APPLICATION_SOAP_XML_CHARSET_UTF_8 = "application/soap+xml; charset=utf-8"; + private static final String REQUEST_TEMPLATE = "\n" + + "" + + "" + " " + + " $stockName" + " " + + "" + ""; + + private static final String STOCK_NAME = "IBM"; + private static final String RESPONSE_BODY = "\n" + + " " + + " " + + " " 
+ " 34.5" + + " " + "" + " "; + private static DummyServer dummyServer; + private ProxyClientServiceImpl proxyClientServiceImpl; + + @BeforeClass + public static void beforeClass() { + dummyServer = new DummyServer(); + } + + @AfterClass + public static void afterClass() { + dummyServer.close(); + } + + @Before + public void before() throws Exception { + + proxyClientServiceImpl = new ProxyClientServiceImpl(); + Map props = ImmutableMap.of(); + proxyClientServiceImpl.activate(props); + TemplateServiceImpl templateService = new TemplateServiceImpl(); + templateService.activate(props); + proxyClientServiceImpl.templateService = templateService; + } + + @After + public void after() throws Exception { + proxyClientServiceImpl.deactivate(null); + } + + @Test + public void testInvokeServiceMissingNode() throws ProxyClientException { + + Map input = Maps.newHashMap(); + Map headers = Maps.newHashMap(); + try { + ProxyResponse response = proxyClientServiceImpl.executeCall(null, headers, input, + null, 0, null); + try { + response.close(); + } catch (Throwable t) { + + } + fail(); + } catch (ProxyClientException ex) { + + } + } + + @Test + public void testInvokeServiceNodeNoEndPoint() throws ProxyClientException { + + Map config = ImmutableMap.of("path", (Object) "/testing"); + Map input = Maps.newHashMap(); + Map headers = Maps.newHashMap(); + try { + ProxyResponse response = proxyClientServiceImpl.executeCall(config, headers, input, + null, 0, null); + try { + response.close(); + } catch (Throwable t) { + + } + fail(); + } catch (ProxyClientException ex) { + } + } + + @Test + public void testInvokeServiceNodeBadEndPoint() throws Exception { + checkBadUrl("http://${url}", + "Invalid Endpoint template, relies on request to resolve valid URL http://${url}"); + checkBadUrl("h${url}", "Invalid Endpoint template, relies on request to resolve valid URL"); + checkBadUrl("${url}", "Invalid Endpoint template, relies on request to resolve valid URL"); + } + + private void 
checkBadUrl(String badUrl, String message) throws Exception { + + + Map config = ImmutableMap.of( + "path", (Object)"/testing", + ProxyClientService.CONFIG_REQUEST_PROXY_ENDPOINT, badUrl); + Map input = Maps.newHashMap(); + Map headers = Maps.newHashMap(); + try { + ProxyResponse response = proxyClientServiceImpl.executeCall(config, headers, input, + null, 0, null); + try { + response.close(); + } catch (Throwable t) { + + } + fail(); + } catch (ProxyClientException ex) { + assertEquals(message, ex.getMessage()); + } + } + + @Test + public void testInvokeServiceNodeEndPoint() throws ProxyClientException, + IOException { + + + dummyServer.setContentType(APPLICATION_SOAP_XML_CHARSET_UTF_8); + dummyServer.setResponseBody(RESPONSE_BODY); + + + + Map config = ImmutableMap.of( + "path", (Object)"/testing", + ProxyClientService.CONFIG_REQUEST_PROXY_ENDPOINT, dummyServer.getUrl(), + ProxyClientService.CONFIG_REQUEST_PROXY_METHOD, "POST", + ProxyClientService.CONFIG_REQUEST_CONTENT_TYPE, APPLICATION_SOAP_XML_CHARSET_UTF_8, + ProxyClientService.CONFIG_PROXY_REQUEST_TEMPLATE, REQUEST_TEMPLATE); + Map input = Maps.newHashMap(); + Map headers = Maps.newHashMap(); + + input.put("stockName", STOCK_NAME); + headers.put("SOAPAction", ""); + ProxyResponse response = proxyClientServiceImpl.executeCall(config, headers, input, + null, 0, null); + + CapturedRequest request = dummyServer.getRequest(); + assertEquals("Method not correct ", "POST", request.getMethod()); + assertEquals("No Soap Action in request", "", request.getHeader("SOAPAction")); + assertEquals("Incorrect Content Type in request", APPLICATION_SOAP_XML_CHARSET_UTF_8, + request.getContentType()); + + response.close(); + + } + + @Test + public void testInvokeServiceNodeEndPointPut() throws ProxyClientException, + IOException { + + + + dummyServer.setContentType(APPLICATION_SOAP_XML_CHARSET_UTF_8); + dummyServer.setResponseBody(RESPONSE_BODY); + + Map config = ImmutableMap.of( + "path", (Object)"/testing", + 
ProxyClientService.CONFIG_REQUEST_PROXY_ENDPOINT, dummyServer.getUrl(), + ProxyClientService.CONFIG_REQUEST_PROXY_METHOD, "PUT", + ProxyClientService.CONFIG_REQUEST_CONTENT_TYPE, APPLICATION_SOAP_XML_CHARSET_UTF_8, + ProxyClientService.CONFIG_PROXY_REQUEST_TEMPLATE, REQUEST_TEMPLATE.getBytes()); + Map input = Maps.newHashMap(); + Map headers = Maps.newHashMap(); + + + input.put("stockName", STOCK_NAME); + + byte[] bas = new byte[1024]; + for (int i = 0; i < bas.length; i++) { + bas[i] = (byte) (i & 0xff); + } + ByteArrayInputStream bais = new ByteArrayInputStream(bas); + ProxyResponse response = proxyClientServiceImpl.executeCall(config, headers, input, + bais, bas.length, "binary/x-data"); + + CapturedRequest request = dummyServer.getRequest(); + assertEquals("Method not correct ", "PUT", request.getMethod()); + assertEquals("Incorrect Content Type in request", "binary/x-data", + request.getContentType()); + + assertArrayEquals("Request Not equal ", bas, request.getRequestBodyAsByteArray()); + response.close(); + } + + @Test + public void testInvokeServiceNodeEndPointGet() throws ProxyClientException, + IOException { + testRequest("GET", "GET", RESPONSE_BODY, -1); + } + + @Test + public void testInvokeServiceNodeEndPointGetLimit() throws ProxyClientException, + IOException { + testRequest("GET", "GET", RESPONSE_BODY, 1020000); + } + + @Test + public void testInvokeServiceNodeEndPointGetLimitLow() throws ProxyClientException, + IOException { + testRequest("GET", "HEAD", null, 1); + } + + @Test + public void testInvokeServiceNodeEndPointOptions() throws ProxyClientException, + IOException { + testRequest("OPTIONS", "OPTIONS", RESPONSE_BODY, -1); + } + + @Test + public void testInvokeServiceNodeEndPointHead() throws ProxyClientException, + IOException { + testRequest("HEAD", "HEAD", null, -1); + } + + @Test + public void testInvokeServiceNodeEndPointOther() throws ProxyClientException, + IOException { + testRequest(null, "GET", RESPONSE_BODY, -1); + } + + private 
void testRequest(String type, String expectedMethod, String body, long limit) + throws ProxyClientException, IOException { + + + + + + dummyServer.setContentType(APPLICATION_SOAP_XML_CHARSET_UTF_8); + dummyServer.setResponseBody(body); + + Builder b = ImmutableMap.builder(); + b.put("path", "/testing"); + b.put(ProxyClientService.CONFIG_REQUEST_PROXY_ENDPOINT, dummyServer.getUrl()); + if ( type != null ) { + b.put(ProxyClientService.CONFIG_REQUEST_PROXY_METHOD, type); + } + if ( limit != -1 ) { + b.put(ProxyClientService.CONFIG_LIMIT_GET_SIZE, limit); + } + b.put(ProxyClientService.CONFIG_REQUEST_CONTENT_TYPE, APPLICATION_SOAP_XML_CHARSET_UTF_8); + b.put(ProxyClientService.CONFIG_PROXY_REQUEST_TEMPLATE, REQUEST_TEMPLATE.getBytes()); + Map config = b.build(); + Map input = Maps.newHashMap(); + Map headers = Maps.newHashMap(); + input.put("stockName", STOCK_NAME); + + ProxyResponse response = proxyClientServiceImpl.executeCall(config, headers, input, + null, 0, null); + + CapturedRequest request = dummyServer.getRequest(); + assertEquals("Method not correct ", expectedMethod, request.getMethod()); + assertEquals("Incorrect Content Type in request", null, request.getContentType()); + + assertEquals(type + "s dont have request bodies ", null, + request.getRequestBodyAsByteArray()); + if ( body == null ) { + Assert.assertNull(response.getResponseBodyAsString()); + } else { + assertEquals(body, response.getResponseBodyAsString().trim()); + assertEquals(APPLICATION_SOAP_XML_CHARSET_UTF_8, + response.getResponseHeaders().get("Content-Type")[0]); + } + response.close(); + } + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyResponseImplTest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyResponseImplTest.java new file mode 100644 index 00000000..01ccd212 --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyResponseImplTest.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more 
contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.when; + +import java.util.Map; + +import org.apache.commons.io.IOUtils; +import org.apache.http.Header; +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.entity.StringEntity; +import org.apache.http.message.BasicHeader; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + +/** + * + */ +@RunWith(MockitoJUnitRunner.class) +public class ProxyResponseImplTest { + + @Mock + private HttpResponse response; + + @Mock + private StatusLine statusLine; + + @Before + public void before() { + when(response.getStatusLine()).thenReturn(statusLine); + when(statusLine.getStatusCode()).thenReturn(200); + when(statusLine.getReasonPhrase()).thenReturn("OK"); + } + + @Test + public void constructWithMultiValuedHeader() { + when(response.getAllHeaders()).thenReturn( + new Header[] { new BasicHeader("Accept", "text/plain"), + new BasicHeader("Accept", "text/html") }); + ProxyResponseImpl proxyResponse = new 
ProxyResponseImpl(200, "Ok", + response); + + Map proxyResponseHeaders = proxyResponse + .getResponseHeaders(); + assertEquals(2, proxyResponseHeaders.get("Accept").length); + } + + @Test + public void accessorsJustHandBackWhatIsOnTheMethodObject() throws Exception { + // given + int result = 200; + methodHasAResponseBody(); + when(response.getAllHeaders()).thenReturn( + new Header[0]); + + // when + ProxyResponseImpl proxyResponse = new ProxyResponseImpl(200, "ok", + response); + + // then + assertEquals(result, proxyResponse.getResultCode()); + IOUtils.toString(response.getEntity().getContent()); + assertEquals(IOUtils.toString(response.getEntity().getContent()), + proxyResponse.getResponseBodyAsString().trim()); + } + + @Test + public void throwsAwayJSESSIONIDCookie() { + // given + when(response.getAllHeaders()).thenReturn( + new Header[] { + new BasicHeader("set-cookie", "supercoolness=extreme"), + new BasicHeader("set-cookie", "JSESSIONID-30sdkf2-3dkfjsie") }); + + // when + ProxyResponseImpl proxyResponse = new ProxyResponseImpl(200, "OK", + response); + + // then + assertTrue(proxyResponse.getResponseHeaders().containsKey("set-cookie")); + for (String headerValue : proxyResponse.getResponseHeaders().get( + "set-cookie")) { + assertFalse(headerValue.contains("JSESSIONID")); + } + } + + private void methodHasAResponseBody() throws Exception { + String body = "Hello, world."; + when(response.getEntity()).thenReturn(new StringEntity(body)); + } + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyServletVivoMan.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyServletVivoMan.java new file mode 100644 index 00000000..a910f1bc --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ProxyServletVivoMan.java @@ -0,0 +1,134 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy; + +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Vector; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.io.output.ByteArrayOutputStream; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.runners.MockitoJUnitRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.proxy.ProxyPostProcessor; +import uk.co.tfd.sm.api.proxy.ProxyPreProcessor; +import uk.co.tfd.sm.template.TemplateServiceImpl; + +import com.google.common.collect.ImmutableMap; + +/** + * Needs a vivo instance to be available. 
+ */ +@RunWith(MockitoJUnitRunner.class) +public class ProxyServletVivoMan { + + + private static final Logger LOGGER = LoggerFactory.getLogger(ProxyServletVivoMan.class); + + private ProxyServlet servlet; + + @Mock + private HttpServletRequest request; + + @Mock + private HttpServletResponse response; + + private Vector headerNames; + + + private Map proxyPreProcessors; + + @Mock + private ProxyPreProcessor proxyPreProcessor; + + private Map proxyPostProcessors; + + @Mock + private ProxyPostProcessor proxyPostProcessor; + + + private ProxyClientServiceImpl proxyClientService; + + @Before + public void setup() throws Exception { + servlet = new ProxyServlet(); + servlet.activate(ImmutableMap.of( + ProxyServlet.PROP_TEMPLATE_PATH, + (Object) "src/test/config")); + headerNames = new Vector(); + proxyPreProcessors = new HashMap(); + proxyPreProcessors.put("rss", proxyPreProcessor); + proxyPostProcessors = new HashMap(); + proxyPostProcessors.put("rss", proxyPostProcessor); + + + + proxyClientService = new ProxyClientServiceImpl(); + TemplateServiceImpl templateService = new TemplateServiceImpl(); + templateService.activate(ImmutableMap.of("x", (Object)"y")); + proxyClientService.templateService = templateService; + proxyClientService.activate(ImmutableMap.of("x",(Object)"y")); + servlet.proxyClientService = proxyClientService; + + } + + @Test + public void testVivoFeed() throws Exception { + + when(request.getPathInfo()).thenReturn("/tests/vivo"); + when(request.getHeaderNames()).thenReturn(headerNames.elements()); + + Map requestParameters = ImmutableMap.of("id", new String[]{"n7934"}); + when(request.getParameterMap()).thenReturn(requestParameters ); + final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); + + when(response.getOutputStream()).thenReturn(new ServletOutputStream() { + + @Override + public void write(int v) throws IOException { + byteArrayOutputStream.write(v); + } + }); + + long s = System.currentTimeMillis(); + 
servlet.service(request, response); + LOGGER.info("Took {} ",(System.currentTimeMillis()-s)); + + verify(response, Mockito.atMost(0)).sendError(404); + + + String output = byteArrayOutputStream.toString("UTF-8"); + LOGGER.info("Got Response {} ",output); + } + + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToHTMLProxyPostProcessorTest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToHTMLProxyPostProcessorTest.java new file mode 100644 index 00000000..f9bebde7 --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToHTMLProxyPostProcessorTest.java @@ -0,0 +1,69 @@ +package uk.co.tfd.sm.proxy; + +import java.io.IOException; +import java.io.InputStream; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.Map; + +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.io.IOUtils; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.proxy.ProxyResponse; +import uk.co.tfd.sm.template.TemplateServiceImpl; + +import com.google.common.collect.ImmutableMap; + +public class RDFToHTMLProxyPostProcessorTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(RDFToHTMLProxyPostProcessorTest.class); + + @Mock + private HttpServletResponse response; + + @Mock + private ProxyResponse proxyResponse; + + public RDFToHTMLProxyPostProcessorTest() { + MockitoAnnotations.initMocks(this); + } + + @Test + public void test() throws IOException { + InputStream in = this.getClass().getResourceAsStream("test.rdf"); + String rdfContent = IOUtils.toString(in); + Mockito.when(proxyResponse.getResponseBodyAsString()).thenReturn(rdfContent); + StringWriter outputWriter = new StringWriter(); + Mockito.when(response.getWriter()).thenReturn(new PrintWriter(outputWriter)); + RDFToHTMLProxyPostProcessor rp = new 
RDFToHTMLProxyPostProcessor(); + TemplateServiceImpl templateServiceImpl = new TemplateServiceImpl(); + templateServiceImpl.activate(null); + rp.templateService = templateServiceImpl; + StringBuilder ns = new StringBuilder(); + ns.append("rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#;"); + ns.append("vivocore=http://vivoweb.org/ontology/core#;"); + ns.append("http://vivo.tfd.co.uk/individual/;"); + ns.append("rdfs=http://www.w3.org/2000/01/rdf-schema#;"); + ns.append("vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#;"); + ns.append("foaf=http://xmlns.com/foaf/0.1/;"); + ns.append("owl=http://www.w3.org/2002/07/owl#;"); + + Map config = ImmutableMap.of( + RDFToHTMLProxyPostProcessor.CONFIG_NAMESPACEMAP, (Object)ns.toString(), + RDFToHTMLProxyPostProcessor.CONFIG_FINALTEMPLATE, "vivoprofile.vm", + RDFToHTMLProxyPostProcessor.CONFIG_RESULT_KEY, "id"); + Map templateParams = ImmutableMap.of("id", (Object)"n7934"); + rp.process(config, templateParams, response, proxyResponse); + String op = outputWriter.toString(); + LOGGER.info("Output {} ", op); + + } + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToMapTest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToMapTest.java new file mode 100644 index 00000000..936a1f3d --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToMapTest.java @@ -0,0 +1,38 @@ +package uk.co.tfd.sm.proxy; + +import java.io.InputStream; +import java.io.InputStreamReader; + +import javax.xml.stream.XMLStreamException; + +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; + +public class RDFToMapTest { + + private static final Logger LOGGER = LoggerFactory + .getLogger(RDFToMap.class); + + @Test + public void testRDFToMap() throws XMLStreamException { + InputStream in = this.getClass().getResourceAsStream("test.rdf"); + InputStreamReader reader = new 
InputStreamReader(in); + Builder b = ImmutableMap.builder(); + b.put("http://www.w3.org/1999/02/22-rdf-syntax-ns#", "rdf"); + b.put("http://vivoweb.org/ontology/core#", "vivocore"); + b.put("http://vivo.tfd.co.uk/individual/",""); + b.put("http://www.w3.org/2000/01/rdf-schema#", "rdfs"); + b.put("http://vitro.mannlib.cornell.edu/ns/vitro/0.7#","vitro"); + b.put("http://xmlns.com/foaf/0.1/","foaf"); + b.put("http://www.w3.org/2002/07/owl#", "owl"); + + RDFToMap rdfToMap = new RDFToMap(b.build()); + LOGGER.info("JSON = {} ",rdfToMap.readMap(reader).resolveToFullJson().toJson(true)); + } + + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToResolvedJsonProxyPostProcessorTest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToResolvedJsonProxyPostProcessorTest.java new file mode 100644 index 00000000..d9657442 --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/RDFToResolvedJsonProxyPostProcessorTest.java @@ -0,0 +1,62 @@ +package uk.co.tfd.sm.proxy; + +import java.io.IOException; +import java.io.InputStream; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.Map; + +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.io.IOUtils; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.proxy.ProxyResponse; + +import com.google.common.collect.ImmutableMap; + +public class RDFToResolvedJsonProxyPostProcessorTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(RDFToResolvedJsonProxyPostProcessorTest.class); + + @Mock + private HttpServletResponse response; + + @Mock + private ProxyResponse proxyResponse; + + public RDFToResolvedJsonProxyPostProcessorTest() { + MockitoAnnotations.initMocks(this); + } + + @Test + public void test() throws IOException { + InputStream in = 
this.getClass().getResourceAsStream("test.rdf"); + String rdfContent = IOUtils.toString(in); + Mockito.when(proxyResponse.getResponseBodyAsString()).thenReturn(rdfContent); + StringWriter outputWriter = new StringWriter(); + Mockito.when(response.getWriter()).thenReturn(new PrintWriter(outputWriter)); + RDFToResolvedJsonProxyPostProcessor rp = new RDFToResolvedJsonProxyPostProcessor(); + StringBuilder ns = new StringBuilder(); + ns.append("rdf=http://www.w3.org/1999/02/22-rdf-syntax-ns#;"); + ns.append("vivocore=http://vivoweb.org/ontology/core#;"); + ns.append("http://vivo.tfd.co.uk/individual/;"); + ns.append("rdfs=http://www.w3.org/2000/01/rdf-schema#;"); + ns.append("vitro=http://vitro.mannlib.cornell.edu/ns/vitro/0.7#;"); + ns.append("foaf=http://xmlns.com/foaf/0.1/;"); + ns.append("owl=http://www.w3.org/2002/07/owl#;"); + + Map config = ImmutableMap.of(RDFToResolvedJsonProxyPostProcessor.CONFIG_NAMESPACEMAP, (Object)ns.toString()); + Map templateParams = ImmutableMap.of(); + rp.process(config, templateParams, response, proxyResponse); + String op = outputWriter.toString(); + LOGGER.info("Output {} ", op); + + } + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ResourceProxyServletTest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ResourceProxyServletTest.java new file mode 100644 index 00000000..f0fdde74 --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/ResourceProxyServletTest.java @@ -0,0 +1,220 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy; + +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.util.HashMap; +import java.util.Map; +import java.util.Vector; + +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.runners.MockitoJUnitRunner; + +import uk.co.tfd.sm.api.proxy.ProxyClientException; +import uk.co.tfd.sm.api.proxy.ProxyClientService; +import uk.co.tfd.sm.api.proxy.ProxyPostProcessor; +import uk.co.tfd.sm.api.proxy.ProxyPreProcessor; +import uk.co.tfd.sm.api.proxy.ProxyResponse; + +import com.google.common.collect.ImmutableMap; + +/** + * + */ +@RunWith(MockitoJUnitRunner.class) +public class ResourceProxyServletTest { + + private ProxyServlet servlet; + + @Mock + private HttpServletRequest request; + + @Mock + private HttpServletResponse response; + + private Vector headerNames; + + @Mock + private ProxyClientService proxyClientService; + + @Mock + private ProxyResponse proxyResponse; + + private Map proxyPreProcessors; + + @Mock + private ProxyPreProcessor proxyPreProcessor; + + private Map proxyPostProcessors; + + @Mock + private ProxyPostProcessor 
proxyPostProcessor; + + @Captor + private ArgumentCaptor> configCaptor; + + @Captor + private ArgumentCaptor> headersCaptor; + + @Captor + private ArgumentCaptor> inputMapCaptor; + + @Captor + private ArgumentCaptor requestInputStreamCaptor; + + @Captor + private ArgumentCaptor requestContentLengthCaptor; + + @Captor + private ArgumentCaptor requestContentTypeCaptor; + + @SuppressWarnings("unchecked") + @Before + public void setup() throws ProxyClientException, UnsupportedEncodingException, IOException { + servlet = new ProxyServlet(); + servlet.activate(ImmutableMap.of( + ProxyServlet.PROP_TEMPLATE_PATH, + (Object) "src/test/config")); + headerNames = new Vector(); + proxyPreProcessors = new HashMap(); + proxyPreProcessors.put("rss", proxyPreProcessor); + proxyPostProcessors = new HashMap(); + proxyPostProcessors.put("rss", proxyPostProcessor); + + + when(request.getPathInfo()).thenReturn("/tests/test1"); + when(request.getHeaderNames()).thenReturn(headerNames.elements()); + when( + proxyClientService.executeCall(Mockito.anyMap(), + Mockito.anyMap(), Mockito.anyMap(), + Mockito.any(InputStream.class), Mockito.anyLong(), + Mockito.anyString())).thenReturn(proxyResponse); + when(proxyResponse.getResponseBodyAsInputStream()).thenReturn( + new ByteArrayInputStream("TestData".getBytes("UTF-8"))); + when(response.getOutputStream()).thenReturn(new ServletOutputStream() { + + @Override + public void write(int arg0) throws IOException { + } + }); + + } + + @Test + public void returnsAProxiedGet() throws Exception { + + servlet.proxyClientService = proxyClientService; + servlet.service(request, response); + + verify(response, Mockito.atMost(0)).sendError(404); + verify(proxyClientService).executeCall(configCaptor.capture(), + headersCaptor.capture(), inputMapCaptor.capture(), + requestInputStreamCaptor.capture(), + requestContentLengthCaptor.capture(), + requestContentTypeCaptor.capture()); + } + + @Test + public void canDoHeaderBasicAuth() throws Exception { + 
servlet.proxyClientService = proxyClientService; + + // when + servlet.service(request, response); + + verify(response, Mockito.atMost(0)).sendError(404); + verify(proxyClientService).executeCall(configCaptor.capture(), + headersCaptor.capture(), inputMapCaptor.capture(), + requestInputStreamCaptor.capture(), + requestContentLengthCaptor.capture(), + requestContentTypeCaptor.capture()); + } + + @Test + public void canDoParameterBasicAuth() throws Exception { + servlet.proxyClientService = proxyClientService; + + // when + servlet.service(request, response); + verify(response, Mockito.atMost(0)).sendError(404); + verify(proxyClientService).executeCall(configCaptor.capture(), + headersCaptor.capture(), inputMapCaptor.capture(), + requestInputStreamCaptor.capture(), + requestContentLengthCaptor.capture(), + requestContentTypeCaptor.capture()); + } + + @Test + public void canPostWithAContentBody() throws Exception { + servlet.proxyClientService = proxyClientService; + + // when + servlet.service(request, response); + verify(response, Mockito.atMost(0)).sendError(404); + verify(proxyClientService).executeCall(configCaptor.capture(), + headersCaptor.capture(), inputMapCaptor.capture(), + requestInputStreamCaptor.capture(), + requestContentLengthCaptor.capture(), + requestContentTypeCaptor.capture()); + } + + @Test + public void canPutWithAContentBody() throws Exception { + servlet.proxyClientService = proxyClientService; + + // when + servlet.service(request, response); + verify(response, Mockito.atMost(0)).sendError(404); + verify(proxyClientService).executeCall(configCaptor.capture(), + headersCaptor.capture(), inputMapCaptor.capture(), + requestInputStreamCaptor.capture(), + requestContentLengthCaptor.capture(), + requestContentTypeCaptor.capture()); + } + + @Test + public void conveysParamsToTheProxy() throws Exception { + servlet.proxyClientService = proxyClientService; + + // when + servlet.service(request, response); + verify(response, 
Mockito.atMost(0)).sendError(404); + verify(proxyClientService).executeCall(configCaptor.capture(), + headersCaptor.capture(), inputMapCaptor.capture(), + requestInputStreamCaptor.capture(), + requestContentLengthCaptor.capture(), + requestContentTypeCaptor.capture()); + } + + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPreProcessorTest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPreProcessorTest.java new file mode 100644 index 00000000..6216ec36 --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/TrustedLoginTokenProxyPreProcessorTest.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +package uk.co.tfd.sm.proxy; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; + +/** + * + */ +@RunWith(MockitoJUnitRunner.class) +public class TrustedLoginTokenProxyPreProcessorTest { + + private final String secret = "e2KS54H35j6vS5Z38nK40"; + + private TrustedLoginTokenProxyPreProcessor proxyPreProcessor; + + @Mock + HttpServletRequest request; + + + Map headers; + + Map templateParams; + + @Before + public void setup() throws Exception { + proxyPreProcessor = new TrustedLoginTokenProxyPreProcessor(); + headers = Maps.newHashMap(); + templateParams = Maps.newHashMap(); + } + + @Test + public void nameIsAsExpected() { + assertEquals("trusted-token", proxyPreProcessor.getName()); + } + + @Test + public void processorAddsValidHashToHeaders() throws Exception { + // when + proxyPreProcessor.preProcessRequest(request, headers, templateParams); + + // then + assertNotNull(headers + .get(TrustedLoginTokenProxyPreProcessor.SECURE_TOKEN_HEADER_NAME)); + String[] tokenParts = ((String) headers.get( + TrustedLoginTokenProxyPreProcessor.SECURE_TOKEN_HEADER_NAME)).split(";"); + String theirHash = tokenParts[0]; + assertEquals(theirHash, myHash(tokenParts)); + + } + + @Test + public void reflectsPortParameterAsConfigured() { + Map props = ImmutableMap.of("port",(Object)"8080"); + proxyPreProcessor.activate(props); + proxyPreProcessor.preProcessRequest(request, headers, templateParams); + + // then + assertEquals(8080, templateParams.get("port")); + } + + + private String myHash(String[] tokenParts) throws Exception { + String user = tokenParts[1]; + String timestamp = tokenParts[2]; + final String message = 
user + + TrustedLoginTokenProxyPreProcessor.TOKEN_SEPARATOR + timestamp; + final String hmac = Signature.calculateRFC2104HMAC(message, secret); + return hmac; + } + + +} diff --git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/http/CapturedRequest.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/http/CapturedRequest.java new file mode 100644 index 00000000..ad04ca77 --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/http/CapturedRequest.java @@ -0,0 +1,77 @@ +package uk.co.tfd.sm.proxy.http; + + +import java.io.IOException; +import java.io.InputStream; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +/** + * + */ +public class CapturedRequest { + + private Map headers = new HashMap(); + private String requestBody; + private String contentType; + private String method; + private byte[] byteBody; + + /** + * @param request + * @throws IOException + */ + public CapturedRequest(HttpServletRequest request) throws IOException { + for (Enumeration names = request.getHeaderNames(); names.hasMoreElements();) { + String name = (String) names.nextElement(); + headers.put(name, request.getHeader(name)); + } + contentType = request.getContentType(); + if (request.getContentLength() > 0) { + byteBody = new byte[request.getContentLength()]; + InputStream in = request.getInputStream(); + in.read(byteBody); + requestBody = new String(byteBody); + } + method = request.getMethod(); + } + + /** + * @param string + * @return + */ + public String getHeader(String name) { + return headers.get(name); + } + + /** + * @return + */ + public String getContentType() { + return contentType; + } + + /** + * @return the requestBody + */ + public String getRequestBody() { + return requestBody; + } + + /** + * @return the method + */ + public String getMethod() { + return method; + } + + /** + * @return + */ + public byte[] getRequestBodyAsByteArray() { + return byteBody; + } +} diff 
--git a/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/http/DummyServer.java b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/http/DummyServer.java new file mode 100644 index 00000000..5a2bdec2 --- /dev/null +++ b/extensions/proxy/src/test/java/uk/co/tfd/sm/proxy/http/DummyServer.java @@ -0,0 +1,182 @@ +/* + * Licensed to the Sakai Foundation (SF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.proxy.http; + +import org.apache.commons.io.IOUtils; +import org.mortbay.jetty.Request; +import org.mortbay.jetty.Server; +import org.mortbay.jetty.handler.AbstractHandler; + +import java.io.IOException; +import java.io.InputStream; +import java.net.ServerSocket; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +/** + * Create a HTTP server on the next available port between 8888 and 8988 that will respond + * with the configured request to any request. Should be run single threaded. + */ +public class DummyServer extends AbstractHandler { + + /** + * The next body response to send. + */ + private String responseBody = "Hello"; + /** + * The server object. + */ + private Server server; + /** + * The port on which the server is listening. 
+ */ + private int port; + /** + * The next content type to respond with. + */ + private String contentType = "text/plain"; + /** + * The next status to send. + */ + private int status = 200; + /** + * The last captured request. + */ + private CapturedRequest request; + + /** + * Create the dummy server on the first available port. There will be 100 tries before + * we stop trying. + */ + public DummyServer() { + int attempts = 0; + while (server == null) { + try { + // new ServerSocket(0) will automatically try to find a free port. + ServerSocket socket = new ServerSocket(0); + port = socket.getLocalPort(); + socket.close(); + + server = new Server(port); + server.setHandler(this); + server.start(); + break; + } catch (Exception e) { + if (server != null) { + try { + server.stop(); + server.destroy(); + } catch (Exception ex2) { + } + } + server = null; + } + attempts++; + if (attempts == 100) { + throw new RuntimeException( + "Unable to find a free port the range 8888 - 8988, aborting http server startup "); + } + } + } + + /** + * Close the server down, releasing resources. + */ + public void close() { + if (server != null) { + try { + server.stop(); + } catch (Exception e) { + } + server.destroy(); + server = null; + } + } + + /** + * @return the current URL that the server is listening on. 
+ */ + public String getUrl() { + return "http://localhost:" + port + "/test"; + } + + /** + * {@inheritDoc} + * + * @see org.mortbay.jetty.Handler#handle(java.lang.String, + * javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, + * int) + */ + public void handle(String target, HttpServletRequest request, + HttpServletResponse response, int dispatch) throws IOException, ServletException { + + this.request = new CapturedRequest(request); + + response.setContentType(contentType); + response.setStatus(status); + response.getWriter().print(responseBody); + ((Request) request).setHandled(true); + } + + /** + * @param contentType + * the contentType to set + */ + public void setContentType(String contentType) { + this.contentType = contentType; + } + + /** + * @param status + * the status to set + */ + public void setStatus(int status) { + this.status = status; + } + + /** + * @param responsebody + * the responsebody to set + */ + public void setResponseBody(String responseBody) { + this.responseBody = responseBody; + } + + public void setResponseBodyFromFile(String filename) throws IOException { + InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( + filename); + if (is == null) { + throw new IOException("No such file " + filename); + } + this.responseBody = IOUtils.toString(is); + } + + /** + * @return the request + */ + public CapturedRequest getRequest() { + return request; + } + + public int getPort() { + return port; + } + +} diff --git a/extensions/proxy/src/test/resources/invalid-sample-rss.xml b/extensions/proxy/src/test/resources/invalid-sample-rss.xml new file mode 100644 index 00000000..9d6dc82d --- /dev/null +++ b/extensions/proxy/src/test/resources/invalid-sample-rss.xml @@ -0,0 +1,150 @@ + + + + RSS is a fascinating technology. The uses for RSS are expanding daily. Take a closer look at how various industries are using the benefits of RSS in their businesses. 
+ http://www.feedforall.com/industry-solutions.htm + Computers/Software/Internet/Site Management/Content Management + Copyright 2004 NotePage, Inc. + http://blogs.law.harvard.edu/tech/rss + en-us + Tue, 19 Oct 2004 13:39:14 -0400 + marketing@feedforall.com + Tue, 19 Oct 2004 13:38:55 -0400 + webmaster@feedforall.com + FeedForAll Beta1 (0.0.1.8) + + http://www.feedforall.com/ffalogo48x48.gif + FeedForAll Sample Feed + http://www.feedforall.com/industry-solutions.htm + FeedForAll Sample Feed + 48 + 48 + + + RSS Solutions for Restaurants + <b>FeedForAll </b>helps Restaurant's communicate with customers. Let your customers know the latest specials or events.<br> +<br> +RSS feed uses include:<br> +<i><font color="#FF0000">Daily Specials <br> +Entertainment <br> +Calendar of Events </i></font> + http://www.feedforall.com/restaurant.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:11 -0400 + + + RSS Solutions for Schools and Colleges + FeedForAll helps Educational Institutions communicate with students about school wide activities, events, and schedules.<br> +<br> +RSS feed uses include:<br> +<i><font color="#0000FF">Homework Assignments <br> +School Cancellations <br> +Calendar of Events <br> +Sports Scores <br> +Clubs/Organization Meetings <br> +Lunches Menus </i></font> + http://www.feedforall.com/schools.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:09 -0400 + + + RSS Solutions for Computer Service Companies + FeedForAll helps Computer Service Companies communicate with clients about cyber security and related issues. 
<br> +<br> +Uses include:<br> +<i><font color="#0000FF">Cyber Security Alerts <br> +Specials<br> +Job Postings </i></font> + http://www.feedforall.com/computer-service.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:07 -0400 + + + RSS Solutions for Governments + FeedForAll helps Governments communicate with the general public about positions on various issues, and keep the community aware of changes in important legislative issues. <b><i><br> +</b></i><br> +RSS uses Include:<br> +<i><font color="#00FF00">Legislative Calendar<br> +Votes<br> +Bulletins</i></font> + http://www.feedforall.com/government.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:05 -0400 + + + RSS Solutions for Politicians + FeedForAll helps Politicians communicate with the general public about positions on various issues, and keep the community notified of their schedule. <br> +<br> +Uses Include:<br> +<i><font color="#FF0000">Blogs<br> +Speaking Engagements <br> +Statements<br> + </i></font> + http://www.feedforall.com/politics.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:03 -0400 + + + RSS Solutions for Meteorologists + FeedForAll helps Meteorologists communicate with the general public about storm warnings and weather alerts, in specific regions. Using RSS meteorologists are able to quickly disseminate urgent and life threatening weather warnings. 
<br> +<br> +Uses Include:<br> +<i><font color="#0000FF">Weather Alerts<br> +Plotting Storms<br> +School Cancellations </i></font> + http://www.feedforall.com/weather.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:01 -0400 + + + RSS Solutions for Realtors & Real Estate Firms + FeedForAll helps Realtors and Real Estate companies communicate with clients informing them of newly available properties, and open house announcements. RSS helps to reach a targeted audience and spread the word in an inexpensive, professional manner. <font color="#0000FF"><br> +</font><br> +Feeds can be used for:<br> +<i><font color="#FF0000">Open House Dates<br> +New Properties For Sale<br> +Mortgage Rates</i></font> + http://www.feedforall.com/real-estate.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:08:59 -0400 + + + RSS Solutions for Banks / Mortgage Companies + FeedForAll helps <b>Banks, Credit Unions and Mortgage companies</b> communicate with the general public about rate changes in a prompt and professional manner. <br> +<br> +Uses include:<br> +<i><font color="#0000FF">Mortgage Rates<br> +Foreign Exchange Rates <br> +Bank Rates<br> +Specials</i></font> + http://www.feedforall.com/banks.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:08:57 -0400 + + + RSS Solutions for Law Enforcement + <b>FeedForAll</b> helps Law Enforcement Professionals communicate with the general public and other agencies in a prompt and efficient manner. Using RSS police are able to quickly disseminate urgent and life threatening information. 
<br> +<br> +Uses include:<br> +<i><font color="#0000FF">Amber Alerts<br> +Sex Offender Community Notification <br> +Weather Alerts <br> +Scheduling <br> +Security Alerts <br> +Police Report <br> +Meetings</i></font> + http://www.feedforall.com/law-enforcement.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:08:56 -0400 + + + \ No newline at end of file diff --git a/extensions/proxy/src/test/resources/invalid-xml.xml b/extensions/proxy/src/test/resources/invalid-xml.xml new file mode 100644 index 00000000..50998872 --- /dev/null +++ b/extensions/proxy/src/test/resources/invalid-xml.xml @@ -0,0 +1,36 @@ + + + + RSS is a fascinating technology. The uses for RSS are expanding daily. Take a closer look at how various industries are using the benefits of RSS in their businesses. + http://www.feedforall.com/industry-solutions.htm + Computers/Software/Internet/Site Management/Content Management + Copyright 2004 NotePage, Inc. + http://blogs.law.harvard.edu/tech/rss + en-us + Tue, 19 Oct 2004 13:39:14 -0400 + marketing@feedforall.com + Tue, 19 Oct 2004 13:38:55 -0400 + webmaster@feedforall.com + FeedForAll Beta1 (0.0.1.8) + + http://www.feedforall.com/ffalogo48x48.gif + FeedForAll Sample Feed + http://www.feedforall.com/industry-solutions.htm + FeedForAll Sample Feed + 48 + 48 + + + RSS Solutions for Restaurants + <b>FeedForAll </b>helps Restaurant's communicate with customers. 
Let your customers know the latest specials or events.<br> +<br> +RSS feed uses include:<br> +<i><font color="#FF0000">Daily Specials <br> +Entertainment <br> +Calendar of Events </i></font> + http://www.feedforall.com/restaurant.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:11 -0400 + + \ No newline at end of file diff --git a/extensions/proxy/src/test/resources/sample-rss.xml b/extensions/proxy/src/test/resources/sample-rss.xml new file mode 100644 index 00000000..57ea10d5 --- /dev/null +++ b/extensions/proxy/src/test/resources/sample-rss.xml @@ -0,0 +1,151 @@ + + + + FeedForAll Sample Feed + RSS is a fascinating technology. The uses for RSS are expanding daily. Take a closer look at how various industries are using the benefits of RSS in their businesses. + http://www.feedforall.com/industry-solutions.htm + Computers/Software/Internet/Site Management/Content Management + Copyright 2004 NotePage, Inc. + http://blogs.law.harvard.edu/tech/rss + en-us + Tue, 19 Oct 2004 13:39:14 -0400 + marketing@feedforall.com + Tue, 19 Oct 2004 13:38:55 -0400 + webmaster@feedforall.com + FeedForAll Beta1 (0.0.1.8) + + http://www.feedforall.com/ffalogo48x48.gif + FeedForAll Sample Feed + http://www.feedforall.com/industry-solutions.htm + FeedForAll Sample Feed + 48 + 48 + + + RSS Solutions for Restaurants + <b>FeedForAll </b>helps Restaurant's communicate with customers. 
Let your customers know the latest specials or events.<br> +<br> +RSS feed uses include:<br> +<i><font color="#FF0000">Daily Specials <br> +Entertainment <br> +Calendar of Events </i></font> + http://www.feedforall.com/restaurant.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:11 -0400 + + + RSS Solutions for Schools and Colleges + FeedForAll helps Educational Institutions communicate with students about school wide activities, events, and schedules.<br> +<br> +RSS feed uses include:<br> +<i><font color="#0000FF">Homework Assignments <br> +School Cancellations <br> +Calendar of Events <br> +Sports Scores <br> +Clubs/Organization Meetings <br> +Lunches Menus </i></font> + http://www.feedforall.com/schools.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:09 -0400 + + + RSS Solutions for Computer Service Companies + FeedForAll helps Computer Service Companies communicate with clients about cyber security and related issues. <br> +<br> +Uses include:<br> +<i><font color="#0000FF">Cyber Security Alerts <br> +Specials<br> +Job Postings </i></font> + http://www.feedforall.com/computer-service.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:07 -0400 + + + RSS Solutions for Governments + FeedForAll helps Governments communicate with the general public about positions on various issues, and keep the community aware of changes in important legislative issues. 
<b><i><br> +</b></i><br> +RSS uses Include:<br> +<i><font color="#00FF00">Legislative Calendar<br> +Votes<br> +Bulletins</i></font> + http://www.feedforall.com/government.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:05 -0400 + + + RSS Solutions for Politicians + FeedForAll helps Politicians communicate with the general public about positions on various issues, and keep the community notified of their schedule. <br> +<br> +Uses Include:<br> +<i><font color="#FF0000">Blogs<br> +Speaking Engagements <br> +Statements<br> + </i></font> + http://www.feedforall.com/politics.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:03 -0400 + + + RSS Solutions for Meteorologists + FeedForAll helps Meteorologists communicate with the general public about storm warnings and weather alerts, in specific regions. Using RSS meteorologists are able to quickly disseminate urgent and life threatening weather warnings. <br> +<br> +Uses Include:<br> +<i><font color="#0000FF">Weather Alerts<br> +Plotting Storms<br> +School Cancellations </i></font> + http://www.feedforall.com/weather.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:09:01 -0400 + + + RSS Solutions for Realtors & Real Estate Firms + FeedForAll helps Realtors and Real Estate companies communicate with clients informing them of newly available properties, and open house announcements. RSS helps to reach a targeted audience and spread the word in an inexpensive, professional manner. 
<font color="#0000FF"><br> +</font><br> +Feeds can be used for:<br> +<i><font color="#FF0000">Open House Dates<br> +New Properties For Sale<br> +Mortgage Rates</i></font> + http://www.feedforall.com/real-estate.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:08:59 -0400 + + + RSS Solutions for Banks / Mortgage Companies + FeedForAll helps <b>Banks, Credit Unions and Mortgage companies</b> communicate with the general public about rate changes in a prompt and professional manner. <br> +<br> +Uses include:<br> +<i><font color="#0000FF">Mortgage Rates<br> +Foreign Exchange Rates <br> +Bank Rates<br> +Specials</i></font> + http://www.feedforall.com/banks.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:08:57 -0400 + + + RSS Solutions for Law Enforcement + <b>FeedForAll</b> helps Law Enforcement Professionals communicate with the general public and other agencies in a prompt and efficient manner. Using RSS police are able to quickly disseminate urgent and life threatening information. 
<br> +<br> +Uses include:<br> +<i><font color="#0000FF">Amber Alerts<br> +Sex Offender Community Notification <br> +Weather Alerts <br> +Scheduling <br> +Security Alerts <br> +Police Report <br> +Meetings</i></font> + http://www.feedforall.com/law-enforcement.htm + Computers/Software/Internet/Site Management/Content Management + http://www.feedforall.com/forum + Tue, 19 Oct 2004 11:08:56 -0400 + + + \ No newline at end of file diff --git a/extensions/proxy/src/test/resources/uk/co/tfd/sm/proxy/test.rdf b/extensions/proxy/src/test/resources/uk/co/tfd/sm/proxy/test.rdf new file mode 100644 index 00000000..420f117e --- /dev/null +++ b/extensions/proxy/src/test/resources/uk/co/tfd/sm/proxy/test.rdf @@ -0,0 +1,55 @@ + + + + + + + + + yearPrecision + + + Dr + ieb@tfd.co.uk + + Boston + Ian + + + Boston, Ian + + + + + + + + + + + 2001-01-01T00:00:00 + + + + + + + + + + + CTO + + + Parallel Computing + + + Caret + + diff --git a/extensions/proxy/src/test/resources/vivoprofile.vm b/extensions/proxy/src/test/resources/vivoprofile.vm new file mode 100644 index 00000000..ff419161 --- /dev/null +++ b/extensions/proxy/src/test/resources/vivoprofile.vm @@ -0,0 +1,14 @@ + +
    +
    Profile of
    ${result.rdfs_label}
    +
    Title
    ${result.vivocore_preferredTitle}
    +
    LastName
    ${result.foaf_lastName}
    +
    FirstName
    ${result.foaf_firstName}
    +
    Primary Email
    ${result.vivocore_primaryEmail}
    +
    Role
    ${result.vitro_mostSpecificType}
    +
    Research Area
    ${result.vivocore_hasResearchArea.rdfs_label}
    +
    Position
    ${result.vivocore_personInPosition.rdfs_label}
    +
    Organization
    ${result.vivocore_personInPosition.vivocore_positionInOrganization.rdfs_label}
    +
    Position Type
    ${result.vivocore_personInPosition.vitro_mostSpecificType}
    + + diff --git a/extensions/resource/DesignNotes.md b/extensions/resource/DesignNotes.md new file mode 100644 index 00000000..be641a2b --- /dev/null +++ b/extensions/resource/DesignNotes.md @@ -0,0 +1,16 @@ +# Request Processing + +Request Processing for Posts is performed by the ModificationRequest class. It does the processing according to the protocol, converting the post parameters from string into the types that have been requested. At the end of processing, the ModificationRequest can be passed to a to a Helper to apply those accumulated changes to an object. + +The ModificationRequest class streams the request, and if its provided with a RequestStreamProcessorImplementation, it will call for every non form field, giving it a stream for a body. File uploads are handled like this. We could handle other types of multi operations in the same way. + + +Multi Operations to update content, authorizables, access control (ie not batch post operations) +I think the best way of handling multiple operations is to use multipart posts containing application/x-www-form-urlencoded parts + +The name of the part is the path to the content to be updated. +The content is the post to that content. + +This makes it possible for a single post operation to update many content items in sequence. + + diff --git a/extensions/resource/README.md b/extensions/resource/README.md new file mode 100644 index 00000000..317780d8 --- /dev/null +++ b/extensions/resource/README.md @@ -0,0 +1,72 @@ +This bundle provides resource processing support and basic protocol support for +POST and GET operations. + +# Resource Processing Support + +Processing is managed by JAX-RS bean (uk.co.tfd.sm.resource.DefaultResourceHandler) that implements uk.co.tfd.sm.api.jaxrs.JaxRestService. This is registered by OSGi with the JAX-RS provider bundle. The path is set to /, namely all requests. 
+ +The DefaultResourceHandler uses a JAX-RS annotation to associate uk.co.tfd.sm.resource.DefaultResourceHandler.getResource(HttpServletRequest, HttpServletResponse, String) with all requests. That initiates a session if one is already associated with the request or authenticates a new session if there is no active session associated with the request. Having got a session it takes the URL and parses it looking for a match in the content system. + +The last part of the URL is parsed first, splitting on '.', removing more and more of the last element of the URL looking for an exact match. If no match is found, then the path is split on '/', searching for matching resources. When a match is found, even if its a root match, then a ResourceImpl is created that defines the match. That ResourceImpl is passed to the ResponseFactoryManagerImpl to find a suitable JAX-RS response bean for the Resource. The ResponseFactoryManagerImpl builds a hash of the characteristics of the Resource based on the path, the underlying resource, selectors, extension and the HTTP method. This hash is then used as a key to lookup a list of ResponseFactoryies that might provide a JAX-RS response bean for the for the Resource. ResponseFactgory(s) are comparable and the most significant ResponseFactory is used to create a Response bean for the Resource. This is a 1 step operation. + +The Bean that is produced is a JAX-RS Bean that is returned by the getResource method at which point the JAX-RS provider takes over an inspects the annotations on the bean to complete processing. Those annotations may be anything that JAX-RS supports as we have made no assertions about the request in finding the Resource or binding a Response to the Resource. The Response bean has access to the parent Adaptables giving it the ability to Adapt to anything that any parent adaptable adapts to. eg Session, ContentManager, Request, Response etc. 
+ +# Default Protocol + +Resource parsing protocol is derived from Apache Sling. +Urls as split into three parts. The resource path, selectors and the extension. Selectors and extensions are only relevant in the last path element of the Url after the resource path. If after removing the resource path and selecting the remainder of the last element of the URL, there are no '.', then the remainder is an extension and there are no selectors. If the remainder has dots then the last element of that dotted string is the extension and the previous ones are selectors + +Some examples + +* /a/resource/path/to/a/resource.selector.selector.extension +* /a/resource/path/to/a/resource.selector.extension +* /a/resource/path/to/a/resource.extension +* /a/resource/path/to/a/resource +* /a/reso.urce/pa.th/t.o/a/resource +* /a/reso.urce/pa.th/t.o/a/resource/with/some/extra/path/selector.selector.extension + +The last example shows a resource with extra path info associated with the resource. The selectors and extensions are always taken from the last element. + + +## GET +* If the URL matches the resource the body is streamed +* If the URL does not match the resource some other action is performed depending on the selectors an extensions. + * An extension of json sends the properties of the resource out as a json stream. + * An extension of xml sends the properties of the resource out as an xml stream. + * A selector that is a number causes that stream to iterate that number of levels into the content tree. + * A selector of -1 or infinity causes that stream to contain the entire subtree + * A selector of tidy or pp (pretty print) formats + + +## POST +* The URL identifies the resource +* If the post is a standard post the names of the POST parameters are the properties and the values are the values. +* Standard POSTS (uri encoded) + * Multiple POST parameters of the same name automatically convert the property into an array. 
+ * A property can be forced to become an array by appending [] to the POST parameter name eg myarray[] + * A property can be forced to have a type by appending the type name eg myinteger@Integer + * Available types are registered in uk.co.tfd.sm.resource.RequestUtils.TYPE_CLASSES + * Types implement uk.co.tfd.sm.resource.types.RequestParameterType + * The conversion from POST parameter string to the internal Type is type specific. +* Multipart Posts + * If the part of the multipart post is a form field, then its processes as if it was from a standard post + * If the part of the post is a stream body, the name of the part is the name of the child node and the body is streamed to that node. + * If the name contains a @ then the second part of the name is the alternative stream name where the body is stored. eg fileName@alternativeStream1 + * A client may send as as many body parts as they desire in the request intermixed with as many property values, however the client should be aware that the request will be processed in sequence and streams will be saved to their final destination before all property values are committed. + +Note: the POST implementation uses Request Streaming and connects the HTTP socket directly to the final output stream with minimal buffering. There is no intermediate save to disk or save to memory. + + +## Standard Types +* Integer, Double, String, Long, Boolean all use the standard Java type representation. +* Calendar uses ISO8601Date which will parse any valid ISO8601Date and store either date or timestamp information honoring Timzone and daylight saving. + +## Extending + +At present this bundle does not support alternative resource providers. Its been written the way it has to be fast and simple. To add a different repository, write a new JaxRestService implementation and register it at a new location. 
+To add ResponseFacotries that implement alternative functionality, write a new ResponseFactoryImplementation, annotate it with the Resource characteristics it should respond to and register it with OSGi. This can/should be done in a separate bundle. + + +## Integration + +There are integration tests under src/test/java in package uk.co.tfd.sm.integration.resource that are run with mvn -Pintegration test. And unit test that contains integration in the package name will get run by that profile. Here these integration tests test the above protocol in a live server over HTTP. They include tests for all the data types, file streaming and full tests UTF8 upto 3 byte charsets. \ No newline at end of file diff --git a/extensions/resource/pom.xml b/extensions/resource/pom.xml new file mode 100644 index 00000000..5214ff12 --- /dev/null +++ b/extensions/resource/pom.xml @@ -0,0 +1,116 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + uk.co.tfd.sm.resource + bundle + 0.1-SNAPSHOT + Sparse Map :: Resource Processing + Provides Resource processing support. 
+ + + + org.apache.felix + maven-scr-plugin + + + org.apache.felix + maven-bundle-plugin + true + + + sparse-map + uk.co.tfd.sm.api.resource.*, uk.co.tfd.sm.api.authn.*, uk.co.tfd.sm.util.* + uk.co.tfd.sm.resource.*, uk.co.tfd.sm.authn.*, uk.co.tfd.sm.authorizables.* + + com.google.common.collect; version="9.0.0", + * + + true + + + + + + + + + org.slf4j + slf4j-simple + 1.5.10 + + + org.apache.felix + org.apache.felix.scr.annotations + + + com.googlecode.guava-osgi + guava-osgi + 9.0.0 + + + junit + junit + 4.4 + jar + compile + + + javax.servlet + servlet-api + 2.4 + jar + compile + + + org.mockito + mockito-all + 1.8.5 + jar + test + + + org.jboss.resteasy + jaxrs-api + 2.2.3.GA + + + org.sakaiproject.nakamura + uk.co.tfd.sm.jaxrs + 0.1-SNAPSHOT + + + org.sakaiproject.nakamura + org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + + + com.google.code.gson + gson + 1.7.1 + + + commons-fileupload + commons-fileupload + 1.2.2 + + + org.sakaiproject.nakamura + uk.co.tfd.sm.integration + 0.1-SNAPSHOT + test + + + + + jboss + http://repository.jboss.org/nexus/content/groups/public/ + + + diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationService.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationService.java new file mode 100644 index 00000000..da188fc0 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationService.java @@ -0,0 +1,12 @@ +package uk.co.tfd.sm.api.authn; + +import javax.servlet.http.HttpServletRequest; + +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; + +public interface AuthenticationService { + + Session authenticate(HttpServletRequest request) throws StorageClientException; + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationServiceCredentials.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationServiceCredentials.java new file 
mode 100644 index 00000000..097dc80d --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationServiceCredentials.java @@ -0,0 +1,9 @@ +package uk.co.tfd.sm.api.authn; + +public interface AuthenticationServiceCredentials { + + String getUserName(); + + String getPassword(); + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationServiceHandler.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationServiceHandler.java new file mode 100644 index 00000000..b00288af --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/authn/AuthenticationServiceHandler.java @@ -0,0 +1,9 @@ +package uk.co.tfd.sm.api.authn; + +import javax.servlet.http.HttpServletRequest; + +public interface AuthenticationServiceHandler { + + AuthenticationServiceCredentials getCredentials(HttpServletRequest request); + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/Adaptable.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/Adaptable.java new file mode 100644 index 00000000..540f884e --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/Adaptable.java @@ -0,0 +1,20 @@ +package uk.co.tfd.sm.api.resource; + +/** + * Adaptables can adapt to other things. To achieve the adaption the adaptable + * may chain to other Adaptables. + * + * @author ieb + * + */ +public interface Adaptable { + + /** + * Adapt this adaptable to the type requested. + * @param + * @param type + * @return the requested type or null if not possible. 
+ */ + T adaptTo(Class type); + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/Resource.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/Resource.java new file mode 100644 index 00000000..032cf999 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/Resource.java @@ -0,0 +1,26 @@ +package uk.co.tfd.sm.api.resource; + +/** + * Represents a resource. + * @author ieb + * + */ +public interface Resource extends Adaptable { + + String getResolvedPath(); + + String getRequestPath(); + + String getPathInfo(); + + String[] getRequestSelectors(); + + String getRequestExt(); + + String getRequestName(); + + String getResourceType(); + + String getToCreatePath(); + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResourceErrorException.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResourceErrorException.java new file mode 100644 index 00000000..9c302cb4 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResourceErrorException.java @@ -0,0 +1,21 @@ +package uk.co.tfd.sm.api.resource; + +/** + * An error performing some operation on the resource. In the http context this + * should generate a 500 status response. 
+ * + * @author ieb + * + */ +public class ResourceErrorException extends RuntimeException { + + /** + * + */ + private static final long serialVersionUID = -4860594814908186212L; + + public ResourceErrorException(String message, Exception e) { + super(message, e); + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResourceForbiddenException.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResourceForbiddenException.java new file mode 100644 index 00000000..87f68462 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResourceForbiddenException.java @@ -0,0 +1,20 @@ +package uk.co.tfd.sm.api.resource; + +/** + * Access to the Resource in the mode attempted was forbidden. + * + * @author ieb + * + */ +public class ResourceForbiddenException extends RuntimeException { + + /** + * + */ + private static final long serialVersionUID = 5403610617596262186L; + + public ResourceForbiddenException(String message, Exception e) { + super(message, e); + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResponseFactory.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResponseFactory.java new file mode 100644 index 00000000..88167217 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResponseFactory.java @@ -0,0 +1,33 @@ +package uk.co.tfd.sm.api.resource; + +import uk.co.tfd.sm.api.resource.binding.ResponseBindingList; + +/** + * A factory for responses. + * + * @author ieb + * + */ +public interface ResponseFactory extends Comparable { + + /** + * @return any dynamic bindings that the ResponseFactory might decide to + * have. Static bindings are specified with the annotations + * {@link ResponseBindings} and {@link ResponseBinding} + */ + ResponseBindingList getBindings(); + + /** + * Get the response as an Adaptable. 
The response is handed back to JAX-RS + * for further processing and should contain JAX-RS annotations to control + * the processing. + * + * @param resource + * @return a response class instance as an adaptable. If this is no a per + * request instance, it must be thread safe. The normal approach is + * to create a new instance, bind it to the current parent + * adaptable, and hand it back. + */ + Adaptable getResponse(Adaptable resource); + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResponseFactoryManager.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResponseFactoryManager.java new file mode 100644 index 00000000..7ec6a789 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/ResponseFactoryManager.java @@ -0,0 +1,22 @@ +package uk.co.tfd.sm.api.resource; + +import uk.co.tfd.sm.api.resource.Adaptable; + +/** + * Manages the creation of Responses stimulated by Adaptables. + * + * @author ieb + * + */ +public interface ResponseFactoryManager { + + /** + * Create the response as an adaptable routing the response creation request + * to the most suitable {@link ResponseFactory} + * + * @param resource the resource to base this reponse on, as an adaptable. + * @return the response that can handle the request, as an adaptable. + */ + Adaptable createResponse(Adaptable resource); + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/SafeMethodResponse.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/SafeMethodResponse.java new file mode 100644 index 00000000..cd65b263 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/SafeMethodResponse.java @@ -0,0 +1,16 @@ +package uk.co.tfd.sm.api.resource; + +import java.util.Set; + +import com.google.common.collect.ImmutableSet; + + +/** + * Indicates the adapatable does not make modifications. 
+ * @author ieb + * + */ +public interface SafeMethodResponse extends Adaptable { + + public static Set COMPATABLE_METHODS = ImmutableSet.of("GET", "HEAD"); +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBinding.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBinding.java new file mode 100644 index 00000000..99a61c5a --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBinding.java @@ -0,0 +1,40 @@ +package uk.co.tfd.sm.api.resource.binding; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.lang.annotation.Inherited; + +import uk.co.tfd.sm.resource.BindingSearchKey; + + +/** + * Annotation to define how a Response is bound. + * @author ieb + * + */ +@Inherited +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +public @interface ResponseBinding { + + String ANY = BindingSearchKey.ANY; + + /** + * @return array of methods that the binding is bound to, if missing bound to ANY methods + */ + String[] method(); + /** + * @return array of types that the binding is bound to, if missing, ANY. + */ + String[] type(); + /** + * @return the selectors, if missing, none, any one of the selectors will match. + */ + String[] selectors(); + /** + * @return the extensions, one must match, if missing none. 
+ */ + String[] extension(); +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBindingList.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBindingList.java new file mode 100644 index 00000000..cd04addc --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBindingList.java @@ -0,0 +1,26 @@ +package uk.co.tfd.sm.api.resource.binding; + +import java.util.Iterator; +import java.util.List; + +import com.google.common.collect.ImmutableList; + + +/** + * A list of response bindings. + * @author ieb + * + */ +public class ResponseBindingList implements Iterable { + + private List list; + + public ResponseBindingList(RuntimeResponseBinding ... bindings) { + list = ImmutableList.copyOf(bindings); + } + + public Iterator iterator() { + return list.iterator(); + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBindings.java b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBindings.java new file mode 100644 index 00000000..b8bf541a --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/api/resource/binding/ResponseBindings.java @@ -0,0 +1,25 @@ +package uk.co.tfd.sm.api.resource.binding; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + + +/** + * Container for a list of response bindings. 
package uk.co.tfd.sm.api.resource.binding;

import java.util.Set;

import uk.co.tfd.sm.resource.BindingSearchKey;

import com.google.common.collect.Sets;

/**
 * A response binding resolved at runtime. Holds the canonical binding key plus
 * a pre-computed set of wildcard keys a request may use to match this binding.
 *
 * @author ieb
 */
public class RuntimeResponseBinding {

	/** The fully-specified key for this binding. */
	private final BindingSearchKey bindingKey;

	/** Pre-computed wildcard variants of the binding key. */
	private final Set<BindingSearchKey> bindingSet = Sets.newHashSet();

	/**
	 * Create a response binding, defaulting any null values to match all
	 * values.
	 *
	 * @param method
	 *            the HTTP method, or null/blank for any.
	 * @param type
	 *            the resource type, or null/blank for any.
	 * @param selector
	 *            the selector, or null/blank for any.
	 * @param extension
	 *            the extension, or null/blank for any.
	 */
	public RuntimeResponseBinding(String method, String type, String selector,
			String extension) {
		bindingKey = new BindingSearchKey(method, type, selector, extension);
		// NOTE(review): the exact key itself and some anyMethod combinations
		// (e.g. anyMethod().anySelector()) are not added; confirm this set of
		// wildcard combinations is intentional before extending it.
		bindingSet.add(bindingKey.anyExtention());
		bindingSet.add(bindingKey.anySelector());
		bindingSet.add(bindingKey.anySelector().anyExtention());
		bindingSet.add(bindingKey.anyType());
		bindingSet.add(bindingKey.anyType().anyExtention());
		bindingSet.add(bindingKey.anyType().anySelector());
		bindingSet.add(bindingKey.anyType().anySelector().anyExtention());
		bindingSet.add(bindingKey.anyMethod());
		bindingSet.add(bindingKey.anyMethod().anyType());
		bindingSet.add(bindingKey.anyMethod().anyType().anyExtention());
		bindingSet.add(bindingKey.anyMethod().anyType().anySelector());
		bindingSet.add(bindingKey.anyMethod().anyType().anySelector()
				.anyExtention());
	}

	/**
	 * @return the canonical binding key string.
	 */
	public String getBindingKey() {
		return bindingKey.getBindingKey();
	}

	/**
	 * @return the wildcard keys a request may match against.
	 */
	public Set<BindingSearchKey> getRequestBindingKeys() {
		return bindingSet;
	}

}
package uk.co.tfd.sm.authn;

import java.util.Set;

import javax.servlet.http.HttpServletRequest;

import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.ReferenceStrategy;
import org.apache.felix.scr.annotations.Service;
import org.sakaiproject.nakamura.api.lite.Repository;
import org.sakaiproject.nakamura.api.lite.Session;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;

import uk.co.tfd.sm.api.authn.AuthenticationService;
import uk.co.tfd.sm.api.authn.AuthenticationServiceCredentials;
import uk.co.tfd.sm.api.authn.AuthenticationServiceHandler;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

/**
 * Authenticates requests by consulting the dynamically bound
 * {@link AuthenticationServiceHandler}s in turn; the first handler to produce
 * credentials wins. Falls back to an anonymous session when no handler matches
 * or the credentials are denied.
 */
@Component(immediate = true, metatype = true)
@Service(value = AuthenticationService.class)
@Reference(name = "authenticationHandler", referenceInterface = AuthenticationServiceHandler.class, bind = "bind", unbind = "unbind", cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, policy = ReferencePolicy.DYNAMIC, strategy = ReferenceStrategy.EVENT)
public class AuthenticationServiceImpl implements AuthenticationService {

	/** Credential class names trusted to log in administratively. */
	private static final Set<String> TRUSTED = ImmutableSet
			.of("uk.co.tfd.sm.authn.TrustedCredentials");

	@Reference
	protected Repository repository;

	/** Snapshot array iterated without locking; rebuilt on bind/unbind. */
	private AuthenticationServiceHandler[] authenticationServiceHandlers = new AuthenticationServiceHandler[0];
	private final Set<AuthenticationServiceHandler> handlers = Sets.newHashSet();

	public AuthenticationServiceImpl() {
	}

	/**
	 * @param repository
	 *            the repository to authenticate against; must not be null.
	 */
	public AuthenticationServiceImpl(Repository repository) {
		if (repository == null) {
			throw new IllegalArgumentException("Repository cant be null");
		}
		this.repository = repository;
	}

	@Override
	public Session authenticate(HttpServletRequest request)
			throws StorageClientException {
		try {
			// Copy the reference so a concurrent rebind cannot swap the
			// array mid-iteration.
			AuthenticationServiceHandler[] hs = authenticationServiceHandlers;
			for (AuthenticationServiceHandler h : hs) {
				AuthenticationServiceCredentials c = h.getCredentials(request);
				if (c != null) {
					if (isTrusted(c)) {
						// Trusted credential types were verified by the
						// handler itself; no password check is performed.
						return repository.loginAdministrative(c.getUserName());
					} else {
						return repository.login(c.getUserName(),
								c.getPassword());
					}
				}
			}
			// No handler produced credentials; use the anonymous session.
			return repository.login();
		} catch (AccessDeniedException e) {
			try {
				return repository.login();
			} catch (AccessDeniedException e2) {
				throw new StorageClientException("Unable to login as anon ", e2);
			}
		}
	}

	/** True if the credential implementation class is on the trusted list. */
	private boolean isTrusted(AuthenticationServiceCredentials c) {
		return TRUSTED.contains(c.getClass().getName());
	}

	protected synchronized void bind(AuthenticationServiceHandler handler) {
		handlers.add(handler);
		authenticationServiceHandlers = handlers
				.toArray(new AuthenticationServiceHandler[handlers.size()]);
	}

	protected synchronized void unbind(AuthenticationServiceHandler handler) {
		// FIX: this previously called handlers.add(handler), so handlers were
		// never removed on unbind and stale services leaked into dispatch.
		handlers.remove(handler);
		authenticationServiceHandlers = handlers
				.toArray(new AuthenticationServiceHandler[handlers.size()]);
	}

}
2); + if (userNamePW != null && userNamePW.length == 2) { + return new AuthenticationServiceCredentials() { + + @Override + public String getUserName() { + return userNamePW[0]; + } + + @Override + public String getPassword() { + return userNamePW[1]; + } + }; + } + } catch (UnsupportedEncodingException e) { + // ignore, wont ever happen. + } + } + return null; + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/authorizables/RestAuthorizableManager.java b/extensions/resource/src/main/java/uk/co/tfd/sm/authorizables/RestAuthorizableManager.java new file mode 100644 index 00000000..bb41feeb --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/authorizables/RestAuthorizableManager.java @@ -0,0 +1,175 @@ +package uk.co.tfd.sm.authorizables; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Date; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.StreamingOutput; + +import org.apache.commons.fileupload.FileUploadException; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.SparseSessionTracker; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.authorizable.AuthorizableManager; + +import uk.co.tfd.sm.api.authn.AuthenticationService; +import 
uk.co.tfd.sm.api.jaxrs.JaxRestService; +import uk.co.tfd.sm.util.http.AuthorizableHelper; +import uk.co.tfd.sm.util.http.ModificationRequest; +import uk.co.tfd.sm.util.http.ResponseUtils; + +@Component(immediate = true, metatype = true) +@Service(value = JaxRestService.class) +@Path("/system/userManager") +public class RestAuthorizableManager implements JaxRestService { + + @Reference + protected SparseSessionTracker sessionTracker; + + @Reference + protected AuthenticationService authenticationService; + + @GET + @Path("{type:user|group}/{userid}.{format}") + public Response getUser(@Context HttpServletRequest request, + @PathParam(value = "type") String authorizableType, + @PathParam(value = "userid") String authorizableId, + @PathParam(value = "format") final String outputFormat) { + try { + + AuthorizableManager authorizableManager = getAuthorizableManager(request); + final Authorizable authorizable = authorizableManager + .findAuthorizable(authorizableId); + Response checkType = checkType(authorizable, authorizableType); + if (checkType != null) { + return checkType; + } + Date lastModified = new Date(); + Long lm = (Long) authorizable.getProperty(Authorizable.LASTMODIFIED_FIELD); + if ( lm == null ) { + lm = (Long) authorizable.getProperty(Authorizable.CREATED_FIELD); + } + if ( lm != null ) { + lastModified = new Date(lm); + } + return Response + .ok(new StreamingOutput() { + @Override + public void write(OutputStream output) + throws IOException, WebApplicationException { + ResponseUtils.writeTree(authorizable, outputFormat, + output); + } + }) + .type(MediaType.APPLICATION_JSON_TYPE.toString() + + "; charset=utf-8") + .lastModified(lastModified) + .build(); + + } catch (StorageClientException e) { + return ResponseUtils.getResponse( + HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + e.getMessage()); + } catch (AccessDeniedException e) { + return ResponseUtils.getResponse(HttpServletResponse.SC_FORBIDDEN, + e.getMessage()); + } + } + + @POST + 
@Path("{type:user|group}/{userid}") + public Response doUpdateAuthorizable(@Context HttpServletRequest request, + @PathParam(value = "type") String authorizableType, + @PathParam(value = "userid") String authorizableId) { + try { + AuthorizableManager authorizableManager = getAuthorizableManager(request); + Authorizable authorizable = authorizableManager + .findAuthorizable(authorizableId); + Response checkType = checkType(authorizable, authorizableType); + if (checkType != null) { + return checkType; + } + + // process the post request. + AuthorizableHelper authorizableHelper = new AuthorizableHelper( + authorizableManager); + ModificationRequest modificationRequest = new ModificationRequest(); + modificationRequest.processRequest(request); + authorizableHelper.applyProperties(authorizable, + modificationRequest); + authorizableHelper.save(); + final List feedback = modificationRequest.getFeedback(); + + return Response + .ok(new StreamingOutput() { + @Override + public void write(OutputStream output) + throws IOException, WebApplicationException { + ResponseUtils.writeFeedback(feedback, output); + } + }) + .type(MediaType.APPLICATION_JSON_TYPE.toString() + + "; charset=utf-8").lastModified(new Date()) + .build(); + + } catch (StorageClientException e) { + return ResponseUtils.getResponse( + HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + e.getMessage()); + } catch (AccessDeniedException e) { + return ResponseUtils.getResponse(HttpServletResponse.SC_FORBIDDEN, + e.getMessage()); + } catch (IOException e) { + return ResponseUtils.getResponse( + HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + e.getMessage()); + } catch (FileUploadException e) { + return ResponseUtils.getResponse( + HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + e.getMessage()); + } + } + + private AuthorizableManager getAuthorizableManager( + HttpServletRequest request) throws StorageClientException { + Session session = sessionTracker.get(request); + if (session == null) { + + session = 
/**
 * A composite search key (method, type, selector, extension) used to bind
 * responses. Null or blank components default to the ANY wildcard. Keys are
 * effectively immutable: the anyXxx() methods return new instances (or this,
 * when the component is already ANY).
 */
public class BindingSearchKey implements Comparable<BindingSearchKey> {

	/**
	 * Matches any combination of the type.
	 */
	public static final String ANY = "ANY";

	/**
	 * Requires that there are none of the type to match the binding.
	 */
	public static final String NONE = "NONE";

	private final String method;
	private final String type;
	private final String selector;
	private final String extension;

	/** Canonical form "method;type;selector;extension". */
	private String key;

	/** Cached hash of {@link #key}. */
	private int hashCode;

	/** Specificity bitmask: method=8, type=4, selector=2, extension=1. */
	private int sortOrder;

	public BindingSearchKey(String method, String type, String selector,
			String extension) {
		this.method = checkAny(method);
		this.type = checkAny(type);
		this.selector = checkAny(selector);
		this.extension = checkAny(extension);
		init();
	}

	private void init() {
		key = method + ";" + type + ";" + selector + ";" + extension;
		hashCode = key.hashCode();
		sortOrder = 0;
		if (!ANY.equals(method)) {
			sortOrder += 8;
		}
		if (!ANY.equals(type)) {
			sortOrder += 4;
		}
		if (!ANY.equals(selector)) {
			sortOrder += 2;
		}
		if (!ANY.equals(extension)) {
			sortOrder += 1;
		}
	}

	/** Null or blank components default to the ANY wildcard. */
	private String checkAny(String v) {
		if (v == null || v.trim().length() == 0) {
			return BindingSearchKey.ANY;
		}
		return v;
	}

	public String getBindingKey() {
		return key;
	}

	public BindingSearchKey anyExtention() {
		if (ANY.equals(extension)) {
			return this;
		}
		return new BindingSearchKey(method, type, selector, ANY);
	}

	public BindingSearchKey anySelector() {
		if (ANY.equals(selector)) {
			return this;
		}
		return new BindingSearchKey(method, type, ANY, extension);
	}

	public BindingSearchKey anyType() {
		if (ANY.equals(type)) {
			return this;
		}
		// FIX: previously passed 'type' in the selector position
		// (new BindingSearchKey(method, ANY, type, extension)), producing
		// keys like METHOD;ANY;<type>;<ext> and breaking wildcard lookups.
		return new BindingSearchKey(method, ANY, selector, extension);
	}

	public BindingSearchKey anyMethod() {
		if (ANY.equals(method)) {
			return this;
		}
		return new BindingSearchKey(ANY, type, selector, extension);
	}

	@Override
	public int hashCode() {
		return hashCode;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (!(obj instanceof BindingSearchKey)) {
			return false;
		}
		// FIX: previously compared hashCode() values only, which treated any
		// object with a colliding hash (including the key String itself) as
		// equal, violating the equals contract.
		return key.equals(((BindingSearchKey) obj).key);
	}

	@Override
	public int compareTo(BindingSearchKey o) {
		// More specific keys (higher sortOrder) sort first.
		return Integer.compare(o.sortOrder, sortOrder);
	}

}
a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResourceHandler.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResourceHandler.java new file mode 100644 index 00000000..c12af7bf --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResourceHandler.java @@ -0,0 +1,179 @@
package uk.co.tfd.sm.resource;

import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;

import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Modified;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.sakaiproject.nakamura.api.lite.Session;
import org.sakaiproject.nakamura.api.lite.SparseSessionTracker;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.api.lite.content.Content;
import org.sakaiproject.nakamura.api.lite.content.ContentManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import uk.co.tfd.sm.api.authn.AuthenticationService;
import uk.co.tfd.sm.api.jaxrs.JaxRestService;
import uk.co.tfd.sm.api.resource.Adaptable;
import uk.co.tfd.sm.api.resource.Resource;
import uk.co.tfd.sm.api.resource.ResponseFactoryManager;
import uk.co.tfd.sm.api.resource.SafeMethodResponse;

/**
 * Root JAX-RS entry point. Resolves a request URL to a sparse Content item
 * (trying progressively shorter paths) and asks the ResponseFactoryManager
 * to produce a response for the resolved resource.
 */
@Component(immediate = true, metatype = true)
@Service(value = JaxRestService.class)
@Path("/")
public class DefaultResourceHandler implements JaxRestService, Adaptable {

    private static final String DEFAULT_MAPPED_ROOT_PATH = "/";

    @Property(value = DEFAULT_MAPPED_ROOT_PATH)
    protected static final String MAPPED_ROOT_PATH = "mapped-root-path";

    private static final Logger LOGGER = LoggerFactory
            .getLogger(DefaultResourceHandler.class);

    @Reference
    protected SparseSessionTracker sessionTracker;

    @Reference
    protected ResponseFactoryManager resourceFactory;

    @Reference
    protected AuthenticationService authenticationService;

    /** Prefix applied to the request path before content lookup. */
    private String basePath = "";

    @Activate
    public void activate(Map<String, Object> properties) {
        modified(properties);
    }

    @Deactivate
    public void deactivate(Map<String, Object> properties) {
    }

    @Modified
    public void modified(Map<String, Object> properties) {
        basePath = (String) properties.get(MAPPED_ROOT_PATH);
        if (basePath == null) {
            basePath = DEFAULT_MAPPED_ROOT_PATH;
        }
    }

    /**
     * Resolves {@code path} to content: full path first, then shortened at
     * '.' boundaries within the last name segment, then at '/' boundaries.
     * Falls through to a "not found" resource when nothing matches.
     */
    @Path("/{resource:.*}")
    public Adaptable getResource(@Context HttpServletRequest request,
            @Context HttpServletResponse response,
            @PathParam("resource") String path) throws StorageClientException,
            AccessDeniedException {
        boolean debug = LOGGER.isDebugEnabled();
        if (debug) {
            LOGGER.debug("Got Request at {} ", request.getRequestURI());
        }
        path = basePath + path;
        Session session = sessionTracker.get(request);
        if (session == null) {
            session = sessionTracker.register(
                    authenticationService.authenticate(request), request);
        }
        ContentManager contentManager = session.getContentManager();

        // start with the full path, and shorten it, first by . then by /
        Content content = contentManager.get(path);
        if (content != null) {
            if (debug) {
                LOGGER.debug("Got {} at [{}] ", content, path);
            }
            return getResponse(request, response, session, content, path, path,
                    path);
        }
        if (debug) {
            LOGGER.debug("Nothing at [{}] ", path);
        }
        char[] pathChars = path.toCharArray();
        String toCreatePath = path;
        boolean inname = true;
        for (int i = pathChars.length - 1; i >= 0; i--) {
            char c = pathChars[i];
            switch (c) {
            case '.':
                // only strip extensions/selectors within the last name segment
                if (inname) {
                    toCreatePath = path.substring(0, i);
                    content = contentManager.get(toCreatePath);
                    if (content != null) {
                        if (debug) {
                            LOGGER.debug("Getting response for {} {} ", path,
                                    toCreatePath);
                        }
                        return getResponse(request, response, session, content,
                                toCreatePath, path, toCreatePath);
                    }
                    if (debug) {
                        LOGGER.debug("Nothing at [{}] ", toCreatePath);
                    }
                }
                break;
            case '/':
                inname = false;
                String testpath = path.substring(0, i);
                content = contentManager.get(testpath);
                if (content != null) {
                    return getResponse(request, response, session, content,
                            testpath, path, toCreatePath);
                }
                if (debug) {
                    LOGGER.debug("Nothing at [{}] ", testpath);
                }
                break;
            }
        }
        if (debug) {
            LOGGER.debug("Not Found [{}] ", path);
        }
        return getResponse(request, response, session, null, path, path,
                toCreatePath);
    }

    /** Wraps the resolved content in a ResourceImpl and maps it to a response. */
    private Adaptable getResponse(HttpServletRequest request,
            HttpServletResponse response, Session session, Content content,
            String resolvedPath, String requestPath, String toCreatePath) {
        Resource resource = new ResourceImpl(this, request, response, session,
                content, resolvedPath, requestPath, toCreatePath);
        boolean debug = LOGGER.isDebugEnabled();
        if (debug) {
            LOGGER.debug("Processing Resource {} ", resource);
        }
        Adaptable aresponse = resourceFactory.createResponse(resource);
        // FIX: previously tested "response" (the HttpServletResponse), which
        // can never be a SafeMethodResponse, so the warning never fired. The
        // created response object is what must be checked and logged.
        if (aresponse instanceof SafeMethodResponse
                && !SafeMethodResponse.COMPATABLE_METHODS.contains(request
                        .getMethod())) {
            LOGGER.warn("Response {} is not suitable for {} methods ",
                    aresponse, request.getMethod());
        }
        if (debug) {
            LOGGER.debug("Mapped to Response {} ", aresponse);
        }
        return aresponse;
    }

    @SuppressWarnings("unchecked")
    public <T> T adaptTo(Class<T> type) {
        if (ResponseFactoryManager.class.equals(type)) {
            return (T) resourceFactory;
        }
        return null;
    }
}
diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResponse.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResponse.java new file mode 100644 index 00000000..ea58f383 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResponse.java @@ -0,0 +1,187 @@
package uk.co.tfd.sm.resource;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.io.IOUtils;
import org.sakaiproject.nakamura.api.lite.Session;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.api.lite.content.Content;
import org.sakaiproject.nakamura.api.lite.content.ContentManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import uk.co.tfd.sm.api.resource.Adaptable;
import uk.co.tfd.sm.api.resource.Resource;
import uk.co.tfd.sm.util.http.ContentHelper;
import uk.co.tfd.sm.util.http.ContentRequestStreamProcessor;
import uk.co.tfd.sm.util.http.ModificationRequest;
import uk.co.tfd.sm.util.http.ResponseUtils;

/**
 * Default GET/POST handling for a resolved resource: streams the body or a
 * JSON/XML rendering on GET, creates/updates content on POST.
 */
public class DefaultResponse implements Adaptable {
+ private static final Logger LOGGER = LoggerFactory + .getLogger(DefaultResponse.class); + private Adaptable adaptable; + private boolean debug; + + public DefaultResponse(Adaptable adaptable) { + debug = LOGGER.isDebugEnabled(); + this.adaptable = adaptable; + } + + @GET + public Response doGet() throws IOException { + try { + Resource resource = adaptable.adaptTo(Resource.class); + final String requestExt = resource.getRequestExt(); + final String[] selectors = resource.getRequestSelectors(); + final Content content = adaptTo(Content.class); + if (content == null) { + return ResponseUtils.getResponse( + HttpServletResponse.SC_NOT_FOUND, "Not Found"); + } + if (!content.getPath().equals(resource.getToCreatePath())) { + // ie the Content item does not exist. + return ResponseUtils.getResponse( + HttpServletResponse.SC_NOT_FOUND, + "Not Found " + content.getPath() + " is not " + + resource.getToCreatePath()); + } + if (debug) { + LOGGER.debug("Get found Resource:[{}] Content:[{}]", resource, + content); + } + if (requestExt == null || requestExt.isEmpty()) { + Session session = adaptTo(Session.class); + final ContentManager contentManager = session + .getContentManager(); + final InputStream in = contentManager.getInputStream(content + .getPath()); + + return Response + .ok(new StreamingOutput() { + @Override + public void write(OutputStream output) + throws IOException, WebApplicationException { + IOUtils.copy(in, output); + in.close(); + + } + }).type(adaptTo(MediaType.class)) + .lastModified(adaptTo(Date.class)).build(); + } else if ("json".equals(requestExt)) { + + return Response + .ok(new StreamingOutput() { + @Override + public void write(OutputStream output) + throws IOException, WebApplicationException { + ResponseUtils.writeTree(content, selectors, + output); + } + }) + .type(MediaType.APPLICATION_JSON_TYPE.toString() + + "; charset=utf-8") + .lastModified(adaptTo(Date.class)).build(); + } else if ("xml".equals(requestExt)) { + return Response + 
.ok(content.getProperties()) + .type(MediaType.APPLICATION_XML_TYPE.toString() + + "; charset=utf-8") + .lastModified(adaptTo(Date.class)).build(); + } + return ResponseUtils.getResponse( + HttpServletResponse.SC_BAD_REQUEST, "format " + requestExt + + " not recognised"); + } catch (StorageClientException e) { + return ResponseUtils.getResponse( + HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + e.getMessage()); + + } catch (AccessDeniedException e) { + return ResponseUtils.getResponse(HttpServletResponse.SC_FORBIDDEN, + e.getMessage()); + } + } + + @POST + public Response doPost() throws IOException { + if (debug) { + LOGGER.debug("Executing POST "); + } + Resource resource = adaptable.adaptTo(Resource.class); + HttpServletRequest request = adaptable + .adaptTo(HttpServletRequest.class); + Session session = adaptTo(Session.class); + try { + ContentManager contentManager = session.getContentManager(); + String contentPath = resource.getToCreatePath(); + + ContentHelper contentHelper = new ContentHelper(contentManager); + Content content = contentHelper.getOrCreateContent(contentPath); + ContentRequestStreamProcessor contentRequestStreamProcessor = new ContentRequestStreamProcessor(content, contentManager, contentHelper); + ModificationRequest modificationRequest = new ModificationRequest(contentRequestStreamProcessor); + modificationRequest.processRequest(request); + contentHelper.applyProperties(content, modificationRequest); + contentHelper.save(); + final List feedback = modificationRequest.getFeedback(); + + + + + return Response + .ok(new StreamingOutput() { + @Override + public void write(OutputStream output) + throws IOException, WebApplicationException { + ResponseUtils.writeFeedback(feedback, output); + } + }) + .type(MediaType.APPLICATION_JSON_TYPE.toString() + + "; charset=utf-8") + .lastModified(new Date()).build(); + } catch (StorageClientException e) { + if (debug) { + LOGGER.debug(e.getMessage(), e); + } + return ResponseUtils.getResponse( + 
HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + e.getMessage()); + + } catch (AccessDeniedException e) { + if (debug) { + LOGGER.debug(e.getMessage(), e); + } + return ResponseUtils.getResponse(HttpServletResponse.SC_FORBIDDEN, + e.getMessage()); + } catch (FileUploadException e) { + if (debug) { + LOGGER.debug(e.getMessage(), e); + } + throw new IOException(e); + } + + } + + + + + public T adaptTo(Class type) { + return adaptable.adaptTo(type); + } +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResponseFactory.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResponseFactory.java new file mode 100644 index 00000000..6cb21a35 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/DefaultResponseFactory.java @@ -0,0 +1,24 @@ +package uk.co.tfd.sm.resource; + +import uk.co.tfd.sm.api.resource.Adaptable; +import uk.co.tfd.sm.api.resource.ResponseFactory; +import uk.co.tfd.sm.api.resource.binding.ResponseBinding; +import uk.co.tfd.sm.api.resource.binding.ResponseBindingList; +import uk.co.tfd.sm.api.resource.binding.ResponseBindings; +import uk.co.tfd.sm.api.resource.binding.RuntimeResponseBinding; + +@ResponseBindings(value = { @ResponseBinding(method = { "GET" }, extension = {}, selectors = {}, type = {}) }) +public class DefaultResponseFactory implements ResponseFactory { + + public int compareTo(ResponseFactory arg0) { + return 1; // always last + } + + public ResponseBindingList getBindings() { + return new ResponseBindingList(new RuntimeResponseBinding("GET",BindingSearchKey.ANY, BindingSearchKey.ANY, BindingSearchKey.ANY)); + } + + public Adaptable getResponse(Adaptable adaptable) { + return new DefaultResponse(adaptable); + } +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/ResourceImpl.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/ResourceImpl.java new file mode 100644 index 00000000..33d57ffc --- /dev/null +++ 
b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/ResourceImpl.java @@ -0,0 +1,224 @@
package uk.co.tfd.sm.resource;

import java.io.IOException;
import java.io.InputStream;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;

import org.apache.commons.lang.StringUtils;
import org.sakaiproject.nakamura.api.lite.Session;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.api.lite.content.Content;

import uk.co.tfd.sm.api.resource.Adaptable;
import uk.co.tfd.sm.api.resource.Resource;
import uk.co.tfd.sm.api.resource.ResourceErrorException;
import uk.co.tfd.sm.api.resource.ResourceForbiddenException;
import uk.co.tfd.sm.api.resource.binding.ResponseBindingList;
import uk.co.tfd.sm.api.resource.binding.RuntimeResponseBinding;

import com.google.common.collect.Lists;

/**
 * Resource implementation that parses the request path into name, selectors
 * and extension, derives the resource type / media type / last-modified from
 * the resolved content, and exposes the request context via adaptTo.
 */
public class ResourceImpl implements Resource {

    private static final String RESOURCE_TYPE_FIELD = "resourceType";
    private static final String DEFAULT_RESOURCE_TYPE = MediaType.APPLICATION_OCTET_STREAM;
    private HttpServletRequest request;
    private Session session;
    private Content content;
    private String resolvedPath;
    private String requestPath;
    private String pathInfo;
    private String requestName;
    private String[] requestSelectors;
    private String requestExt;
    private Adaptable resourceHandler;
    private ResponseBindingList responseBindingList;
    private String resourceType;
    private HttpServletResponse response;
    private Date lastModified;
    private MediaType mediaType;
    /**
     * The last name path is the longest path before a / in the request URL.
     * It indicates the resource that should be created if one would be created.
     */
    private String toCreatePath;

    public ResourceImpl(Adaptable resourceHandler, HttpServletRequest request,
            HttpServletResponse response, Session session, Content content,
            String resolvedPath, String requestPath, String lastNamePath) {
        this.resourceHandler = resourceHandler;
        this.request = request;
        this.response = response;
        this.session = session;
        this.content = content;
        this.resolvedPath = resolvedPath;
        this.requestPath = requestPath;
        this.pathInfo = requestPath.substring(resolvedPath.length());
        this.toCreatePath = lastNamePath;
        int lastSlash = pathInfo.lastIndexOf('/');
        if (lastSlash == pathInfo.length() - 1) {
            // ends with / (also covers the empty pathInfo case: -1 == -1)
            this.requestName = "";
            setParts("");
        } else {
            int dot = pathInfo.indexOf(".", lastSlash + 1);
            if (dot >= 0) {
                this.requestName = pathInfo.substring(lastSlash + 1, dot);
                setParts(pathInfo.substring(dot)); // there was a /
            } else {
                this.requestName = pathInfo.substring(lastSlash + 1);
                setParts("");
            }
        }
        List<RuntimeResponseBinding> bindingList = Lists.newArrayList();
        resourceType = getType();
        mediaType = MediaType.valueOf(resourceType);
        lastModified = getLastModified();
        String method = request.getMethod();
        String bindingExt = checkAny(requestExt);
        // one binding per selector so any selector can match a factory
        for (String selector : checkAny(requestSelectors)) {
            bindingList.add(new RuntimeResponseBinding(method, resourceType,
                    selector, bindingExt));
        }
        responseBindingList = new ResponseBindingList(
                bindingList.toArray(new RuntimeResponseBinding[bindingList
                        .size()]));
    }

    /** @return the content's last-modified time, or the epoch when absent. */
    private Date getLastModified() {
        if (content != null && content.hasProperty(Content.LASTMODIFIED_FIELD)) {
            return new Date(
                    (Long) content.getProperty(Content.LASTMODIFIED_FIELD));
        }
        return new Date(0);
    }

    /** Widens a missing selector list to the single ANY selector. */
    private String[] checkAny(String[] spec) {
        if (spec == null || spec.length == 0) {
            return new String[] { BindingSearchKey.ANY };
        }
        return spec;
    }

    // NOTE(review): an empty (non-null) extension is passed through rather
    // than widened to ANY; BindingSearchKey normalises blanks, so this is
    // believed equivalent — confirm against RuntimeResponseBinding.
    private String checkAny(String spec) {
        if (spec == null) {
            return BindingSearchKey.ANY;
        }
        return spec;
    }

    /**
     * @return the resourceType property if set, else the mime type, else
     *         application/octet-stream.
     */
    private String getType() {
        if (content != null) {
            if (content.hasProperty(RESOURCE_TYPE_FIELD)) {
                return (String) content.getProperty(RESOURCE_TYPE_FIELD);
            }
            if (content.hasProperty(Content.MIMETYPE_FIELD)) {
                return (String) content.getProperty(Content.MIMETYPE_FIELD);
            }
        }
        return DEFAULT_RESOURCE_TYPE;
    }

    /**
     * Splits ".sel1.sel2.ext" into selectors and extension; empty tokens are
     * discarded by StringUtils.split.
     */
    private void setParts(String namePathInfo) {
        String[] parts = StringUtils.split(namePathInfo, '.');
        switch (parts.length) {
        case 0:
            this.requestSelectors = new String[0];
            this.requestExt = "";
            break;
        case 1:
            this.requestSelectors = new String[0];
            this.requestExt = parts[0];
            break;
        default:
            this.requestSelectors = Arrays.copyOfRange(parts, 0,
                    parts.length - 1);
            this.requestExt = parts[parts.length - 1];
            break;
        }
    }

    @SuppressWarnings("unchecked")
    public <T> T adaptTo(Class<T> type) {
        try {
            if (Resource.class.equals(type)) {
                return (T) this;
            } else if (ResponseBindingList.class.equals(type)) {
                return (T) responseBindingList;
            } else if (MediaType.class.equals(type)) {
                return (T) mediaType;
            } else if (Date.class.equals(type)) {
                return (T) lastModified;
            } else if (Session.class.equals(type)) {
                return (T) session;
            } else if (InputStream.class.equals(type)) {
                if (content == null) {
                    return null;
                }
                return (T) session.getContentManager().getInputStream(
                        content.getPath());
            } else if (Content.class.equals(type)) {
                return (T) content;
            } else if (HttpServletRequest.class.equals(type)) {
                return (T) request;
            } else if (HttpServletResponse.class.equals(type)) {
                return (T) response;
            } else {
                // anything else is delegated to the handler
                return (T) resourceHandler.adaptTo(type);
            }
        } catch (StorageClientException e) {
            throw new ResourceErrorException(e.getMessage(), e);
        } catch (AccessDeniedException e) {
            throw new ResourceForbiddenException(e.getMessage(), e);
        } catch (IOException e) {
            throw new ResourceErrorException(e.getMessage(), e);
        }
    }

    public String getResolvedPath() {
        return resolvedPath;
    }

    public String getRequestPath() {
        return requestPath;
    }

    public String getPathInfo() {
        return pathInfo;
    }

    public String[] getRequestSelectors() {
        return requestSelectors;
    }

    public String getRequestExt() {
        return requestExt;
    }

    public String getRequestName() {
        return requestName;
    }

    public String getResourceType() {
        return resourceType;
    }

    public String getToCreatePath() {
        return toCreatePath;
    }

    @Override
    public String toString() {
        return MessageFormat.format(
                "requestPath=[{0}] resource path=[{1}], resourceType=[{2}], selectors={3}, ext=[{4}], toCreatePath=[{5}]",
                requestPath, resolvedPath, resourceType,
                Arrays.toString(requestSelectors), requestExt, toCreatePath);
    }

}
diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/ResponseFactoryManagerImpl.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/ResponseFactoryManagerImpl.java new file mode 100644 index 00000000..5d8fbeeb --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/ResponseFactoryManagerImpl.java @@ -0,0 +1,182 @@
package uk.co.tfd.sm.resource;

import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.ReferenceStrategy;
import org.apache.felix.scr.annotations.Service;

import uk.co.tfd.sm.api.resource.Adaptable;
import uk.co.tfd.sm.api.resource.ResponseFactory;
import uk.co.tfd.sm.api.resource.ResponseFactoryManager;
import uk.co.tfd.sm.api.resource.binding.ResponseBinding;
import uk.co.tfd.sm.api.resource.binding.ResponseBindingList;
import uk.co.tfd.sm.api.resource.binding.ResponseBindings;
import
uk.co.tfd.sm.api.resource.binding.RuntimeResponseBinding; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +@Component(immediate = true, metatype = true) +@Service(value = ResponseFactoryManager.class) +@Reference(name = "responseFactory", referenceInterface = ResponseFactory.class, cardinality = ReferenceCardinality.OPTIONAL_MULTIPLE, policy = ReferencePolicy.DYNAMIC, strategy = ReferenceStrategy.EVENT, unbind = "unbind", bind = "bind") +public class ResponseFactoryManagerImpl implements ResponseFactoryManager { + + private static final String[] ANY_ARRAY = { BindingSearchKey.ANY }; + private static final String[] EMPTY_ARRAY = { BindingSearchKey.NONE }; + private Map> responseFactoryBindingsStore = Maps + .newHashMap(); + private Map> responseFactoryBindings = ImmutableMap + .of(); + + public Adaptable createResponse(Adaptable resource) { + ResponseBindingList responseBinding = resource + .adaptTo(ResponseBindingList.class); + + Set searchKeySet = Sets.newHashSet(); + for ( RuntimeResponseBinding rm : responseBinding ) { + searchKeySet.addAll(rm.getRequestBindingKeys()); + } + Set responseFactoryCandidates = Sets.newHashSet(); + BindingSearchKey[] searchKeys = searchKeySet.toArray(new BindingSearchKey[searchKeySet.size()]); + Arrays.sort(searchKeys); + for ( BindingSearchKey rm : searchKeys ) { + String bindingKey = rm.getBindingKey(); + Set bindingSet = responseFactoryBindings + .get(bindingKey); + if (bindingSet != null) { + responseFactoryCandidates.addAll(bindingSet); + } + } + if (responseFactoryCandidates.size() == 0) { + return new DefaultResponse(resource); + } else { + return Collections.max(responseFactoryCandidates).getResponse( + resource); + } + } + + protected void bind(ResponseFactory responseFactory) { + synchronized (responseFactoryBindingsStore) { + 
ResponseBindings responseBindings = responseFactory.getClass() + .getAnnotation(ResponseBindings.class); + if (responseBindings != null) { + for (ResponseBinding rb : responseBindings.value()) { + String[] methods = checkAny(rb.method()); + String[] types = checkAny(rb.type()); + String[] selectors = checkEmpty(rb.selectors()); + String[] extensions = checkEmpty(rb.extension()); + for (String m : methods) { + for (String t : types) { + for (String s : selectors) { + for (String e : extensions) { + addBinding(new RuntimeResponseBinding(m, t, + s, e), responseFactory); + } + } + } + } + } + } + + ResponseBindingList bindings = responseFactory.getBindings(); + if (bindings != null) { + for (RuntimeResponseBinding rm : bindings) { + addBinding(rm, responseFactory); + } + } + save(); + } + } + + protected void unbind(ResponseFactory responseFactory) { + synchronized (responseFactoryBindingsStore) { + ResponseBindings responseBindings = responseFactory.getClass() + .getAnnotation(ResponseBindings.class); + if (responseBindings != null) { + for (ResponseBinding rb : responseBindings.value()) { + String[] methods = checkAny(rb.method()); + String[] types = checkAny(rb.type()); + String[] selectors = checkEmpty(rb.selectors()); + String[] extensions = checkEmpty(rb.extension()); + for (String m : methods) { + for (String t : types) { + for (String s : selectors) { + for (String e : extensions) { + removeBinding(new RuntimeResponseBinding(m, + t, s, e), responseFactory); + } + } + } + } + } + } + + ResponseBindingList bindings = responseFactory.getBindings(); + if (bindings != null) { + for (RuntimeResponseBinding rm : bindings) { + removeBinding(rm, responseFactory); + } + } + save(); + } + } + + private void save() { + Builder> b = ImmutableMap.builder(); + for (Entry> e : responseFactoryBindingsStore + .entrySet()) { + b.put(e.getKey(), ImmutableSet.copyOf(e.getValue())); + } + responseFactoryBindings = b.build(); + } + + private void removeBinding(RuntimeResponseBinding 
rm, + ResponseFactory responseFactory) { + String bindingKey = rm.getBindingKey(); + Set bindingSet = responseFactoryBindingsStore + .get(bindingKey); + if (bindingSet != null) { + bindingSet.remove(responseFactory); + if (bindingSet.size() == 0) { + responseFactoryBindingsStore.remove(bindingKey); + } + } + } + + private void addBinding(RuntimeResponseBinding rm, ResponseFactory rf) { + String bindingKey = rm.getBindingKey(); + Set bindingSet = responseFactoryBindingsStore + .get(bindingKey); + if (bindingSet == null) { + bindingSet = Sets.newHashSet(); + responseFactoryBindingsStore.put(bindingKey, bindingSet); + } + bindingSet.add(rf); + } + + private String[] checkEmpty(String[] v) { + if (v == null || v.length == 0) { + return EMPTY_ARRAY; + } + return v; + } + + private String[] checkAny(String[] v) { + if (v == null || v.length == 0) { + return ANY_ARRAY; + } + return v; + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/BooleanType.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/BooleanType.java new file mode 100644 index 00000000..8c5d2475 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/BooleanType.java @@ -0,0 +1,24 @@ +package uk.co.tfd.sm.resource.types; + + +public class BooleanType implements RequestParameterType{ + + @Override + public String getType() { + return RequestParameterType.BOOLEAN; + } + + @Override + public Boolean newInstance(Object value) { + if ( value instanceof Boolean ) { + return (Boolean) value; + } + return Boolean.parseBoolean(String.valueOf(value)); + } + + @Override + public Class getComponentType() { + return Boolean.class; + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/CalendarType.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/CalendarType.java new file mode 100644 index 00000000..6e934b2a --- /dev/null +++ 
b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/CalendarType.java @@ -0,0 +1,42 @@
package uk.co.tfd.sm.resource.types;

import java.util.Calendar;
import java.util.Date;

import org.sakaiproject.nakamura.api.lite.util.ISO8601Date;

/** Converts request parameter values to ISO8601Date (a Calendar subtype). */
public class CalendarType implements RequestParameterType<ISO8601Date> {

    @Override
    public String getType() {
        return RequestParameterType.CALENDAR;
    }

    @Override
    public ISO8601Date newInstance(Object value) {
        if (value instanceof ISO8601Date) {
            return (ISO8601Date) value;
        } else if (value instanceof Calendar) {
            ISO8601Date c = new ISO8601Date();
            c.setTimeInMillis(((Calendar) value).getTimeInMillis());
            c.setTimeZone(((Calendar) value).getTimeZone());
            return c;
        } else if (value instanceof Date) {
            ISO8601Date c = new ISO8601Date();
            c.setTime((Date) value);
            return c;
        } else if (value instanceof Long) {
            ISO8601Date c = new ISO8601Date();
            c.setTimeInMillis((Long) value);
            return c;
        }
        // fall back to parsing the string form as an ISO8601 date
        return new ISO8601Date(String.valueOf(value));
    }

    @Override
    public Class<ISO8601Date> getComponentType() {
        return ISO8601Date.class;
    }

}
diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/DoubleType.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/DoubleType.java new file mode 100644 index 00000000..a7d313eb --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/DoubleType.java @@ -0,0 +1,23 @@
package uk.co.tfd.sm.resource.types;

/** Converts request parameter values to Double. */
public class DoubleType implements RequestParameterType<Double> {

    @Override
    public String getType() {
        return RequestParameterType.DOUBLE;
    }

    @Override
    public Double newInstance(Object value) {
        if (value instanceof Double) {
            return (Double) value;
        }
        return Double.parseDouble(String.valueOf(value));
    }

    @Override
    public Class<Double> getComponentType() {
        return Double.class;
    }

}
diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/IntegerType.java
b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/IntegerType.java new file mode 100644 index 00000000..5f4de959 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/IntegerType.java @@ -0,0 +1,23 @@
package uk.co.tfd.sm.resource.types;

/** Converts request parameter values to Integer. */
public class IntegerType implements RequestParameterType<Integer> {

    @Override
    public String getType() {
        return RequestParameterType.INTEGER;
    }

    @Override
    public Integer newInstance(Object value) {
        if (value instanceof Integer) {
            return (Integer) value;
        }
        return Integer.parseInt(String.valueOf(value));
    }

    @Override
    public Class<Integer> getComponentType() {
        return Integer.class;
    }

}
diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/LongType.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/LongType.java new file mode 100644 index 00000000..7542df57 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/LongType.java @@ -0,0 +1,23 @@
package uk.co.tfd.sm.resource.types;

/** Converts request parameter values to Long. */
public class LongType implements RequestParameterType<Long> {

    @Override
    public String getType() {
        return RequestParameterType.LONG;
    }

    @Override
    public Long newInstance(Object value) {
        if (value instanceof Long) {
            return (Long) value;
        }
        return Long.parseLong(String.valueOf(value));
    }

    @Override
    public Class<Long> getComponentType() {
        return Long.class;
    }

}
diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/RequestParameterType.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/RequestParameterType.java new file mode 100644 index 00000000..d7ae7754 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/RequestParameterType.java @@ -0,0 +1,29 @@
package uk.co.tfd.sm.resource.types;

/**
 * Converts raw request parameter values into typed values.
 *
 * @param <T> the value type this converter produces.
 */
public interface RequestParameterType<T> {

    public final static String STRING = "String";
    public static final String INTEGER = "Integer";
    public
static final String LONG = "Long"; + public static final String DOUBLE = "Double"; + public static final String CALENDAR = "Calendar"; + public static final String BOOLEAN = "Boolean"; + + /** + * @return the extenal name of the type. + */ + String getType(); + + /** + * A new instance + * @param value the value, not a scalar. + * @return + */ + T newInstance(Object value); + + /** + * @return the class that this RequestParameterType implementation produces. + */ + Class getComponentType(); + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/StringType.java b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/StringType.java new file mode 100644 index 00000000..9d1b1c97 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/resource/types/StringType.java @@ -0,0 +1,20 @@ +package uk.co.tfd.sm.resource.types; + +public class StringType implements RequestParameterType { + + @Override + public String getType() { + return RequestParameterType.STRING; + } + + @Override + public String newInstance(Object value) { + return String.valueOf(value); + } + + @Override + public Class getComponentType() { + return String.class; + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/AuthorizableTypeAdapter.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/AuthorizableTypeAdapter.java new file mode 100644 index 00000000..ce0dfb58 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/AuthorizableTypeAdapter.java @@ -0,0 +1,27 @@ +package uk.co.tfd.sm.util.gson.adapters; + +import java.lang.reflect.Type; +import java.util.Map.Entry; + +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +public class AuthorizableTypeAdapter implements JsonSerializer { + + + 
@Override + public JsonElement serialize(Authorizable authorizable, Type type, + JsonSerializationContext context) { + JsonObject jsonObject = new JsonObject(); + for ( Entry e : authorizable.getProperties().entrySet() ) { + jsonObject.add(e.getKey(), context.serialize(e.getValue())); + } + return jsonObject; + } + + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/CalenderTypeAdapter.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/CalenderTypeAdapter.java new file mode 100644 index 00000000..7cdca768 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/CalenderTypeAdapter.java @@ -0,0 +1,27 @@ +package uk.co.tfd.sm.util.gson.adapters; + +import java.lang.reflect.Type; +import java.util.Calendar; + +import org.sakaiproject.nakamura.api.lite.util.ISO8601Date; + +import com.google.gson.JsonElement; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +public class CalenderTypeAdapter implements JsonSerializer{ + + @Override + public JsonElement serialize(Calendar calendar, Type calendarType, + JsonSerializationContext context) { + if ( calendar instanceof ISO8601Date ) { + return new JsonPrimitive(calendar.toString()); + } + ISO8601Date d = new ISO8601Date(); + d.setTimeInMillis(calendar.getTimeInMillis()); + d.setTimeZone(calendar.getTimeZone()); + return new JsonPrimitive(d.toString()); + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/ContentTypeAdapter.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/ContentTypeAdapter.java new file mode 100644 index 00000000..f688c2c4 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/gson/adapters/ContentTypeAdapter.java @@ -0,0 +1,40 @@ +package uk.co.tfd.sm.util.gson.adapters; + +import java.lang.reflect.Type; +import java.util.Map.Entry; + +import 
org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.content.Content; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +public class ContentTypeAdapter implements JsonSerializer { + + private int depth; + + public ContentTypeAdapter(int recursion) { + depth = recursion; + } + + @Override + public JsonElement serialize(Content content, Type type, + JsonSerializationContext context) { + JsonObject jsonObject = new JsonObject(); + for ( Entry e : content.getProperties().entrySet() ) { + jsonObject.add(e.getKey(), context.serialize(e.getValue())); + } + if ( depth > 0 ) { + depth--; + for ( Content child : content.listChildren()) { + jsonObject.add(StorageClientUtils.getObjectName(child.getPath()), context.serialize(child)); + } + depth++; + } + return jsonObject; + } + + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/AuthorizableHelper.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/AuthorizableHelper.java new file mode 100644 index 00000000..c9327539 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/AuthorizableHelper.java @@ -0,0 +1,79 @@ +package uk.co.tfd.sm.util.http; + +import java.util.Map; +import java.util.Map.Entry; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.authorizable.AuthorizableManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.Maps; + +public class AuthorizableHelper { + + private static final Logger LOGGER = LoggerFactory.getLogger(AuthorizableHelper.class); + private Map toSave; + private AuthorizableManager authorizableManager; + private boolean debug; + 
+ public AuthorizableHelper(AuthorizableManager authorizableManager) { + this.authorizableManager = authorizableManager; + toSave = Maps.newHashMap(); + this.debug = LOGGER.isDebugEnabled(); + } + + public void applyProperties(Authorizable authorizable, ModificationRequest modificationRequest) { + for (Entry e : modificationRequest.getParameterSet(ParameterType.REMOVE).entrySet()) { + authorizable.removeProperty(e.getKey()); + } + + for (Entry e : modificationRequest.getParameterSet(ParameterType.ADD).entrySet()) { + authorizable.setProperty(e.getKey(), e.getValue()); + } + modificationRequest.resetProperties(); + + + toSave.put(authorizable.getId(), authorizable); + } + + public Authorizable getOrCreateAuthorizable(String authorizableId, String authorizableType) + throws StorageClientException, AccessDeniedException { + Authorizable authorizable = toSave.get(authorizableId); + if (authorizable == null) { + authorizable = authorizableManager.findAuthorizable(authorizableId); + if (authorizable == null) { + if (debug) { + LOGGER.debug("Created A New Unsaved Authorizable object {} ", + authorizableId); + } + if ("user".equals(authorizableType)) { + if (!authorizableManager.createUser(authorizableId, authorizableId, null, null) ) { + throw new IllegalArgumentException("Unable to create user "+authorizableId); + } + } else { + if (!authorizableManager.createGroup(authorizableId, authorizableId, null) ) { + throw new IllegalArgumentException("Unable to create group "+authorizableId); + } + } + authorizable = authorizableManager.findAuthorizable(authorizableId); + } else if (debug) { + LOGGER.debug("Authorizable Existed at {} ", authorizable); + } + toSave.put(authorizableId, authorizable); + } + return authorizable; + } + + public void save() throws AccessDeniedException, StorageClientException { + for (Authorizable a : toSave.values()) { + authorizableManager.updateAuthorizable(a); + if (debug) { + LOGGER.debug("Updated {} ", a); + } + } + } + +} diff --git 
a/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ContentHelper.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ContentHelper.java new file mode 100644 index 00000000..a9248df6 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ContentHelper.java @@ -0,0 +1,69 @@ +package uk.co.tfd.sm.util.http; + +import java.util.Map; +import java.util.Map.Entry; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.Maps; + +public class ContentHelper { + + private static final Logger LOGGER = LoggerFactory.getLogger(ContentHelper.class); + private Map toSave; + private ContentManager contentManager; + private boolean debug; + + public ContentHelper(ContentManager contentManager) { + this.contentManager = contentManager; + toSave = Maps.newHashMap(); + this.debug = LOGGER.isDebugEnabled(); + } + + public void applyProperties(Content content, ModificationRequest modificationRequest) { + for (Entry e : modificationRequest.getParameterSet(ParameterType.REMOVE).entrySet()) { + content.removeProperty(e.getKey()); + } + + for (Entry e : modificationRequest.getParameterSet(ParameterType.ADD).entrySet()) { + content.setProperty(e.getKey(), e.getValue()); + } + modificationRequest.resetProperties(); + // add to the save list, just in case the content didnt come from here + toSave.put(content.getPath(), content); + } + + public Content getOrCreateContent(String contentPath) + throws StorageClientException, AccessDeniedException { + Content content = toSave.get(contentPath); + if (content == null) { + content = contentManager.get(contentPath); + if (content == null) { + if (debug) { + LOGGER.debug("Created A New Unsaved Content 
object {} ", + contentPath); + } + content = new Content(contentPath, null); + } else if (debug) { + LOGGER.debug("Content Existed at {} ", content); + } + toSave.put(contentPath, content); + } + return content; + } + + public void save() throws AccessDeniedException, StorageClientException { + for (Content c : toSave.values()) { + contentManager.update(c); + if (debug) { + LOGGER.debug("Updated {} ", c); + } + } + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ContentRequestStreamProcessor.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ContentRequestStreamProcessor.java new file mode 100644 index 00000000..9a95ef55 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ContentRequestStreamProcessor.java @@ -0,0 +1,59 @@ +package uk.co.tfd.sm.util.http; + +import java.io.IOException; +import java.io.InputStream; +import java.util.List; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; + +import com.google.common.collect.Lists; + +public class ContentRequestStreamProcessor implements + RequestStreamProcessor { + + private Content content; + private ContentManager contentManager; + private ContentHelper contentHelper; + + public ContentRequestStreamProcessor(Content content, + ContentManager contentManager, ContentHelper contentHelper) { + this.content = content; + this.contentManager = contentManager; + this.contentHelper = contentHelper; + } + + @Override + public List processStream(String streamName, String fileName, String contentType, InputStream stream, + ModificationRequest modificationRequest) + throws StorageClientException, AccessDeniedException, IOException { + String alternativeStreamName = 
RequestUtils.getStreamName(streamName); + if ( fileName == null || fileName.length() == 0 ) { + fileName = RequestUtils.getFileName(streamName); + } + List feedback = Lists.newArrayList(); + String path = content.getPath(); + if (fileName != null) { + path = StorageClientUtils.newPath(path, fileName); + Content childContent = contentHelper.getOrCreateContent(path); + childContent.setProperty(StorageClientUtils.getAltField(Content.MIMETYPE_FIELD, alternativeStreamName), contentType); + contentHelper.applyProperties(childContent, modificationRequest); + } else { + // all properties to this point in a stream get + // saved to the upload object. + contentHelper.applyProperties(content, modificationRequest); + } + if (alternativeStreamName == null) { + contentManager.writeBody(path, stream); + feedback.add("Saved Stream " + fileName); + } else { + contentManager.writeBody(path, stream, alternativeStreamName); + feedback.add("Saved Stream " + fileName + ":" + alternativeStreamName); + } + return feedback; + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ModificationRequest.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ModificationRequest.java new file mode 100644 index 00000000..67517401 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ModificationRequest.java @@ -0,0 +1,196 @@ +package uk.co.tfd.sm.util.http; + +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.fileupload.FileItemIterator; +import org.apache.commons.fileupload.FileItemStream; +import org.apache.commons.fileupload.FileUploadException; +import org.apache.commons.fileupload.servlet.ServletFileUpload; +import org.apache.commons.fileupload.util.Streams; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import 
org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; + +/** + * This class deals with processing a POST request. It needs a stream processor + * to apply any streamed bodies within the request to something in the model. If + * there is no stream processor given, it will ignore non form parts of a + * multipart post. + * + * Loading a standard post with no streaming. + *
    + *      {@link Authorizable} authorizable = xxx;
    + * 		{@link AuthorizableHelper} authorizableHelper = new {@link AuthorizableHelper}(authorizableManager);
    + *		{@link ModificationRequest} modificationRequest = new {@link ModificationRequest}(null);
    + *
    + *      // process the request
    + *		modificationRequest.processRequest(request);
    + *		 
    + *		// apply the properties to the authorizable
    + *		authorizableHelper.applyProperties(authorizable, modificationRequest);
    + *		
    + *		// save everything that was modified
    + *		authorizableHelper.save();
    + *		
    + *		// get the feedback
    + *		List feedback = modificationRequest.getFeedback();
    + * 
    + * Loading a multipart post with streaming. + *
    + *      {@link Content} content = xxx;
    + * 		{@link ContentHelper} contentHelper = new {@link ContentHelper}(contentManager);
    + *      {@link ContentRequestStreamProcessor} contentRequestStreamProcessor = new {@link ContentRequestStreamProcessor}(content, contentManager, contentHelper);
    + *		ModificationRequest modificationRequest = new {@link ModificationRequest}(contentRequestStreamProcessor);
    + *
    + *      // process the request
    + *		modificationRequest.processRequest(request);
    + *		 
    + *		// apply the properties to the content
    + *		contentHelper.applyProperties(content, modificationRequest);
    + *		
    + *		// save everything that was modified
    + *		contentHelper.save();
    + *		
    + *		// get the feedback
    + *		List feedback = modificationRequest.getFeedback();
    + * 
    + * + * + * + * @author ieb + * + */ +public class ModificationRequest { + private static final Logger LOGGER = LoggerFactory + .getLogger(ModificationRequest.class); + private List feedback = Lists.newArrayList(); + private RequestStreamProcessor streamProcessor; + private Map> stores; + + /** + * Create a ModificationRequest that will handle streaming of bodies + * @param the type of the StreamProcessor. + * @param streamProcessor + */ + public ModificationRequest(RequestStreamProcessor streamProcessor) { + this.streamProcessor = streamProcessor; + Builder> b = ImmutableMap.builder(); + for ( ParameterType pt : ParameterType.values() ) { + Map m = Maps.newHashMap(); + b.put(pt, m); + } + stores = b.build(); + } + + /** + * Create a ModificationRequest that will ignore streamed bodies. + */ + public ModificationRequest() { + this(null); + } + + /** + * Process the request in a stream. + * + * @param request + * @throws IOException + * @throws FileUploadException + * @throws StorageClientException + * @throws AccessDeniedException + */ + public void processRequest(HttpServletRequest request) throws IOException, + FileUploadException, StorageClientException, AccessDeniedException { + boolean debug = LOGGER.isDebugEnabled(); + if (ServletFileUpload.isMultipartContent(request)) { + if (debug) { + LOGGER.debug("Multipart POST "); + } + feedback.add("Multipart Upload"); + ServletFileUpload upload = new ServletFileUpload(); + FileItemIterator iterator = upload.getItemIterator(request); + while (iterator.hasNext()) { + FileItemStream item = iterator.next(); + if (debug) { + LOGGER.debug("Got Item {}",item); + } + String name = item.getFieldName(); + InputStream stream = item.openStream(); + if (item.isFormField()) { + ParameterType pt = ParameterType.typeOfRequestParameter(name); + String propertyName = RequestUtils.propertyName(pt.getPropertyName(name)); + RequestUtils.accumulate(stores.get(pt), propertyName, RequestUtils.toValue(name, Streams.asString(stream))); + 
feedback.add(pt.feedback(propertyName)); + } else { + + if (streamProcessor != null) { + feedback.addAll(streamProcessor.processStream(name, StorageClientUtils.getObjectName(item.getName()), item.getContentType(), stream, this)); + } + + } + } + if (debug) { + LOGGER.debug("No More items "); + } + + } else { + if (debug) { + LOGGER.debug("Trad Post "); + } + // use traditional unstreamed operations. + @SuppressWarnings("unchecked") + Map parameters = request.getParameterMap(); + if (debug) { + LOGGER.debug("Traditional POST {} ", parameters); + } + Set> entries = parameters.entrySet(); + + for (Entry param : entries) { + String name = (String) param.getKey(); + ParameterType pt = ParameterType.typeOfRequestParameter(name); + String propertyName = RequestUtils.propertyName(pt.getPropertyName(name)); + RequestUtils.accumulate(stores.get(pt), propertyName, RequestUtils.toValue(name, param.getValue())); + feedback.add(pt.feedback(propertyName)); + } + } + } + + /** + * @return A map of the properties processed so far based on the stream + * recieved. + */ + public Map getParameterSet(ParameterType pt) { + return stores.get(pt); + } + + /** + * Clear the current set of properties to add and remove. + */ + public void resetProperties() { + for ( Entry> e : stores.entrySet()) { + e.getValue().clear(); + } + } + + /** + * @return feedback from the resquest processing. + */ + public List getFeedback() { + return feedback; + } +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ParameterType.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ParameterType.java new file mode 100644 index 00000000..b9248bec --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ParameterType.java @@ -0,0 +1,88 @@ +package uk.co.tfd.sm.util.http; + +/** + * Enumeration of property types. 
+ * @author ieb + * + */ +public enum ParameterType { + REMOVE("Removed ","","@Delete"), + ADD("Added ","",""), + OPERATION("Added ",":",""), + SPECIAL("Added ","",":") + ; + + private static ParameterType[] CHECK_SEQUENCE = new ParameterType[] { + REMOVE, + SPECIAL, + OPERATION, + ADD + }; + + private String feedback; + private String prefix; + private String suffix; + + private int prefixLength; + + private int suffixLength; + + private ParameterType(String feedback, String prefix, String suffix) { + this.feedback = feedback; + this.prefix = prefix; + this.suffix = suffix; + this.prefixLength = prefix.length(); + this.suffixLength = suffix.length(); + + } + + /** + * @param propertyName + * @return feedback message for the request parameter. + */ + public String feedback(String propertyName) { + return feedback+propertyName; + } + + /** + * @param name + * @return the property name including any value type information but excluding ParameterType information. + */ + public String getPropertyName(String name) { + if ( name == null ) { + return name; + } + return name.substring(prefixLength, name.length()-suffixLength); + } + + /** + * @param propertyName + * @return a ParameterType name. 
+ */ + public String getParameterName(String propertyName) { + return prefix+propertyName+suffix; + } + + /** + * @param name + * @return the ParameterType for the supplied ParameterName + */ + public static ParameterType typeOfRequestParameter(String name) { + if ( name == null || name.length() == 1) { + return ADD; + } + for ( ParameterType p : CHECK_SEQUENCE) { + if ( p.prefixLength+p.suffixLength == 0 ) { + return p; + } else if ( p.prefixLength == 0 && name.endsWith(p.suffix)) { + return p; + } else if ( p.suffixLength == 0 && name.startsWith(p.prefix)) { + return p; + } else if ( name.startsWith(p.prefix) && name.endsWith(p.suffix)) { + return p; + } + } + return ADD; + } + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/RequestStreamProcessor.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/RequestStreamProcessor.java new file mode 100644 index 00000000..bd9f6027 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/RequestStreamProcessor.java @@ -0,0 +1,25 @@ +package uk.co.tfd.sm.util.http; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Collection; + +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; + +/** + * A stream processor knows how to process a sections of a stream of data. + * + * @author ieb + * + * @param + * The type of parent object the stream processor works on. 
+ */ +public interface RequestStreamProcessor { + + Collection processStream(String streamName, + String fileName, String contentType, InputStream stream, + ModificationRequest modificationRequest) + throws StorageClientException, AccessDeniedException, IOException; + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/RequestUtils.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/RequestUtils.java new file mode 100644 index 00000000..111263e1 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/RequestUtils.java @@ -0,0 +1,189 @@ +package uk.co.tfd.sm.util.http; + +import java.lang.reflect.Array; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.resource.types.BooleanType; +import uk.co.tfd.sm.resource.types.CalendarType; +import uk.co.tfd.sm.resource.types.DoubleType; +import uk.co.tfd.sm.resource.types.IntegerType; +import uk.co.tfd.sm.resource.types.LongType; +import uk.co.tfd.sm.resource.types.RequestParameterType; +import uk.co.tfd.sm.resource.types.StringType; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; + +public class RequestUtils { + + private static final Logger LOGGER = LoggerFactory + .getLogger(RequestUtils.class); + @SuppressWarnings("unchecked") + private static final Class>[] TYPE_CLASSES = new Class[] { + StringType.class, + IntegerType.class, + LongType.class, + BooleanType.class, + CalendarType.class, + DoubleType.class}; + private static final Map> TYPES = createScalarTypes(); + + + private static Map> createScalarTypes() { + Builder> b = ImmutableMap.builder(); + for (Class> typeClass : TYPE_CLASSES) { + try { + RequestParameterType o = typeClass.newInstance(); + b.put(o.getType(), o); + } catch (InstantiationException e) { + LOGGER.error(e.getMessage(), e); + } catch (IllegalAccessException e) { + 
LOGGER.error(e.getMessage(), e); + } + } + return b.build(); + } + + /** + * @param name + * the property name complete with type hints operations or + * stream names. + * @return Just the property name excluding [] markers and all type, stream + * hints. + */ + public static String propertyName(String name) { + String[] parts = StringUtils.split(name, "@", 2); + if (parts == null || parts.length == 0 ) { + return null; + } + String propertyName = parts[0]; + if (propertyName.endsWith("[]")) { + propertyName = propertyName.substring(0, propertyName.length() - 2); + } + return propertyName; + } + + /** + * Creates the most suitable type. + * + * @param name + * the name. Types are specified as name@Type where Type is the + * name of the type. If the name ends with a [] the type is + * forced to be an array. + * @param value + * the value the value. If its an array, the type is assumed to + * be an array provided there is more than one element. If there + * is one element and the name ends with [] an array is created, + * otherwise a scalar is created. 
+ * @return an instance + */ + public static Object toValue(String name, Object value) { + String[] parts = StringUtils.split(name, "@", 2); + String fieldName = null; + String fieldType = "String"; + if (parts.length == 2) { + fieldType = parts[1]; + fieldName = parts[0]; + } else if (parts.length == 1) { + fieldName = parts[0]; + } else { + throw new IllegalArgumentException("Invalid property name"); + } + try { + int l = Array.getLength(value); + RequestParameterType rpt = TYPES.get(fieldType); + if (rpt == null) { + rpt = TYPES.get(RequestParameterType.STRING); + } + if (!fieldName.endsWith("[]") && l == 1) { + return rpt.newInstance(Array.get(value, 0)); + } + Class componentType = rpt.getComponentType(); + Object[] a = (Object[]) Array.newInstance(componentType, l); + for (int i = 0; i < l; i++) { + a[i] = rpt.newInstance(Array.get(value, i)); + } + return a; + } catch (IllegalArgumentException e) { + RequestParameterType rpt = TYPES.get(fieldType); + if (rpt == null) { + rpt = TYPES.get(RequestParameterType.STRING); + } + return rpt.newInstance(value); + } + } + + /** + * @param name + * the property name, with the stream specified as + * name@StreamName. Only file uploads have stream names. + * @return the stream name or null if there is none. 
+ */ + public static String getStreamName(String name) { + + String[] parts = StringUtils.split(name, "@", 3); + if (parts != null && parts.length >= 2) { + return parts[1]; + } + return null; + } + + /** + * @param name the property name + * @return the name of the file taking into account any alternative name + */ + public static String getFileName(String name) { + String[] parts = StringUtils.split(name, "@", 2); + if (parts != null && parts.length > 1) { + return parts[0]; + } + return name; + } + + public static void accumulate(Map toAdd, String propertyName, + Object value) { + + Object o = toAdd.get(propertyName); + if (o == null) { + toAdd.put(propertyName, value); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Saved {} {}", propertyName, value); + } + } else { + int sl = 1; + try { + sl = Array.getLength(o); + } catch (IllegalArgumentException e) { + Object[] newO = (Object[]) Array.newInstance(o.getClass(), 1); + newO[0] = o; + o = newO; + } + int vl = 1; + try { + vl = Array.getLength(value); + } catch (IllegalArgumentException e) { + Object[] newO = (Object[]) Array.newInstance(value.getClass(), + 1); + newO[0] = value; + value = newO; + } + Object type = Array.get(o, 0); + Object[] newArray = (Object[]) Array.newInstance(type.getClass(), + sl + vl); + System.arraycopy(o, 0, newArray, 0, sl); + System.arraycopy(value, 0, newArray, sl, vl); + toAdd.put(propertyName, newArray); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Appended {} {} {}", new Object[] { propertyName, + value, newArray }); + } + } + } + + + +} diff --git a/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ResponseUtils.java b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ResponseUtils.java new file mode 100644 index 00000000..52524447 --- /dev/null +++ b/extensions/resource/src/main/java/uk/co/tfd/sm/util/http/ResponseUtils.java @@ -0,0 +1,106 @@ +package uk.co.tfd.sm.util.http; + +import java.io.IOException; +import java.io.OutputStream; +import 
java.io.UnsupportedEncodingException; +import java.util.Calendar; +import java.util.List; + +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status.Family; +import javax.ws.rs.core.Response.StatusType; + +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.content.Content; + +import uk.co.tfd.sm.util.gson.adapters.AuthorizableTypeAdapter; +import uk.co.tfd.sm.util.gson.adapters.CalenderTypeAdapter; +import uk.co.tfd.sm.util.gson.adapters.ContentTypeAdapter; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +public class ResponseUtils { + + public static Response getResponse(final int code, final String message) { + Family family = Family.SERVER_ERROR; + if ( code < 100 ) { + family = Family.OTHER; + } else if ( code < 200 ) { + family = Family.INFORMATIONAL; + } else if ( code < 300 ) { + family = Family.SUCCESSFUL; + } else if ( code < 400 ) { + family = Family.REDIRECTION; + } else if ( code < 500 ) { + family = Family.CLIENT_ERROR; + } + final Family ffamily = family; + return Response.status(new StatusType() { + + public int getStatusCode() { + return code; + } + + public String getReasonPhrase() { + return message; + } + + public Family getFamily() { + return ffamily; + } + }).build(); + } + + public static void writeTree(Content content, String[] selectors, OutputStream output) throws UnsupportedEncodingException, IOException { + GsonBuilder gb = new GsonBuilder(); + if ( contains(selectors, "pp", "tidy") ) { + gb.setPrettyPrinting(); + } + int recursion = 0; + if ( contains(selectors, "-1", "infinity")) { + recursion = Integer.MAX_VALUE; + } else if ( selectors != null && selectors.length > 0 ) { + try { + recursion = Integer.parseInt(selectors[selectors.length-1]); + } catch ( NumberFormatException e ) { + recursion = 0; + } + } + gb.registerTypeHierarchyAdapter(Content.class, new ContentTypeAdapter(recursion)); + 
gb.registerTypeHierarchyAdapter(Calendar.class, new CalenderTypeAdapter()); + Gson gson = gb.create(); + output.write(gson.toJson(content).getBytes("UTF-8")); + } + + public static void writeTree(Authorizable authorizable, String format, OutputStream output) throws UnsupportedEncodingException, IOException { + GsonBuilder gb = new GsonBuilder(); + if ( "pp.json".equals(format) || "tidy.json".equals(format) ) { + gb.setPrettyPrinting(); + } + gb.registerTypeHierarchyAdapter(Authorizable.class, new AuthorizableTypeAdapter()); + gb.registerTypeHierarchyAdapter(Calendar.class, new CalenderTypeAdapter()); + Gson gson = gb.create(); + output.write(gson.toJson(authorizable).getBytes("UTF-8")); + } + + private static boolean contains(String[] selectors, String ... value) { + if ( selectors == null ) { + return false; + } + for ( String s : selectors) { + for ( String v : value ) { + if (v.equals(s)) { + return true; + } + } + } + return false; + } + + public static void writeFeedback(List feedback, OutputStream output) throws UnsupportedEncodingException, IOException { + output.write(new GsonBuilder().setPrettyPrinting().create().toJson(feedback).getBytes("UTF-8")); + } + + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/authorizables/RestAuthorizableManagerTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/authorizables/RestAuthorizableManagerTest.java new file mode 100644 index 00000000..a2b15b81 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/authorizables/RestAuthorizableManagerTest.java @@ -0,0 +1,105 @@ +package uk.co.tfd.sm.authorizables; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.StreamingOutput; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import 
org.mockito.MockitoAnnotations; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.SparseSessionTracker; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.lite.BaseMemoryRepository; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.api.authn.AuthenticationService; +import uk.co.tfd.sm.util.http.ParameterUtil; + +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; + + +public class RestAuthorizableManagerTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(RestAuthorizableManagerTest.class); + private BaseMemoryRepository baseMemoryRepository; + private Session session; + private RestAuthorizableManager restAuthorizableManager; + @Mock + private AuthenticationService authenticationService; + @Mock + private SparseSessionTracker sparseSessionTracker; + @Mock + private HttpServletRequest request; + private Session userSession; + + public RestAuthorizableManagerTest() { + MockitoAnnotations.initMocks(this); + } + @Before + public void before() throws ClientPoolException, StorageClientException, AccessDeniedException, ClassNotFoundException, IOException { + baseMemoryRepository = new BaseMemoryRepository(); + session = baseMemoryRepository.getRepository().loginAdministrative(); + restAuthorizableManager = new RestAuthorizableManager(); + restAuthorizableManager.authenticationService = authenticationService; + restAuthorizableManager.sessionTracker = sparseSessionTracker; + + session.getAuthorizableManager().createUser("testuser", "TestUser", "testpassword", null); + userSession = baseMemoryRepository.getRepository().loginAdministrative("testuser"); + + + + } + + + @Test + public void test() throws 
StorageClientException, WebApplicationException, IOException { + Mockito.when(authenticationService.authenticate(request)).thenReturn(session); + Mockito.when(sparseSessionTracker.register(session, request)).thenReturn(session); + Response response = restAuthorizableManager.getUser(request, "user", "testuser", "pp"); + Assert.assertNotNull(response); + Assert.assertEquals(200, response.getStatus()); + StreamingOutput entity = (StreamingOutput) response.getEntity(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + entity.write(baos); + String output = new String(baos.toByteArray(),"UTF-8"); + JsonParser parser = new JsonParser(); + LOGGER.info("Response {} ",output); + JsonObject o = parser.parse(output).getAsJsonObject(); + Assert.assertEquals("admin",o.get(Authorizable.CREATED_BY_FIELD).getAsString()); + Assert.assertEquals("testuser",o.get(Authorizable.ID_FIELD).getAsString()); + Assert.assertEquals("TestUser",o.get(Authorizable.NAME_FIELD).getAsString()); + Assert.assertEquals("u",o.get("type").getAsString()); + } + + + @Test + public void testUpdate() throws StorageClientException, WebApplicationException, IOException, AccessDeniedException { + Mockito.when(authenticationService.authenticate(request)).thenReturn(session); + Mockito.when(sparseSessionTracker.register(session, request)).thenReturn(session); + Mockito.when(request.getParameterMap()).thenReturn(ParameterUtil.getParameters()); + Mockito.when(request.getMethod()).thenReturn("POST"); + Response response = restAuthorizableManager.doUpdateAuthorizable(request, "user", "testuser"); + Assert.assertNotNull(response); + Assert.assertEquals(200, response.getStatus()); + StreamingOutput entity = (StreamingOutput) response.getEntity(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + entity.write(baos); + String output = new String(baos.toByteArray(),"UTF-8"); + ParameterUtil.checkResponse(output); + 
ParameterUtil.testProperties(session.getAuthorizableManager().findAuthorizable("testuser").getProperties()); + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/DataTypesTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/DataTypesTest.java new file mode 100644 index 00000000..f275f8b9 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/DataTypesTest.java @@ -0,0 +1,135 @@ +package uk.co.tfd.sm.integration.resource; + +import java.io.IOException; +import java.util.List; + +import org.apache.http.auth.AuthenticationException; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.auth.BasicScheme; +import org.apache.http.message.BasicNameValuePair; +import org.junit.Test; + +import uk.co.tfd.sm.integration.HttpTestUtils; +import uk.co.tfd.sm.integration.IntegrationServer; +import uk.co.tfd.sm.integration.JsonTestUtils; + +import com.google.common.collect.Lists; +import com.google.gson.JsonObject; + +public class DataTypesTest { + + private static final String ADMIN_USER = "admin"; + private static final String ADMIN_PASSWORD = "admin"; + private static final String APPLICATION_JSON = "application/json; charset=utf-8"; + private HttpTestUtils httpTestUtils = new HttpTestUtils(); + + public DataTypesTest() throws IOException { + IntegrationServer.start(); + } + + + @Test + public void testBooleanType() throws AuthenticationException, + ClientProtocolException, IOException { + Boolean testsingle = true; + Boolean[] testproperty = new Boolean[] { true, + false, true, true }; + Boolean testarray = false; + JsonObject json = testType("Boolean", testsingle, testproperty, + testarray); + JsonTestUtils.checkProperty(json, "testproperty", testproperty); + 
JsonTestUtils.checkProperty(json, "testarray", new Boolean[] {testarray}); + JsonTestUtils.checkProperty(json, "testsingle", testsingle); + } + + @Test + public void testCalendarType() throws AuthenticationException, + ClientProtocolException, IOException { + String testsingle = "2011-01-30"; + String[] testproperty = new String[] { "2011-02-20", "2011-03-21", "2011-03-22", + "2011-04-23T11:23:13+11:30" }; + String testarray = "2011-06-30T09:12:01Z"; + JsonObject json = testType("Calendar", testsingle, testproperty, + testarray); + JsonTestUtils.checkProperty(json, "testproperty", testproperty); + JsonTestUtils.checkProperty(json, "testarray", new String[] {testarray}); + JsonTestUtils.checkProperty(json, "testsingle", testsingle); + + } + + @Test + public void testDoubleType() throws AuthenticationException, + ClientProtocolException, IOException { + Double testsingle = 1000010010100102010210.0023; + Double[] testproperty = new Double[] { 1.01, -1.02, 1.03, -1.04 }; + Double testarray = 2.2; + JsonObject json = testType("Double", testsingle, testproperty, + testarray); + JsonTestUtils.checkProperty(json, "testproperty", testproperty); + JsonTestUtils.checkProperty(json, "testarray", new Double[]{testarray}); + JsonTestUtils.checkProperty(json, "testsingle", testsingle); + } + + @Test + public void testIntegerType() throws AuthenticationException, + ClientProtocolException, IOException { + Integer testsingle = 1001; + Integer[] testproperty = new Integer[] { 101, -102, 103, -104 }; + Integer testarray = 22; + JsonObject json = testType("Integer", testsingle, testproperty, + testarray); + JsonTestUtils.checkProperty(json, "testproperty", testproperty); + JsonTestUtils.checkProperty(json, "testarray", new Integer[]{testarray}); + JsonTestUtils.checkProperty(json, "testsingle", testsingle); + } + + + @Test + public void testLongType() throws AuthenticationException, + ClientProtocolException, IOException { + Long testsingle = Long.MAX_VALUE; + Long[] testproperty = 
new Long[] { 101L, Long.MIN_VALUE, 103L, Long.MAX_VALUE }; + Long testarray = Long.MIN_VALUE; + JsonObject json = testType("Long", testsingle, testproperty, + testarray); + JsonTestUtils.checkProperty(json, "testproperty", testproperty); + JsonTestUtils.checkProperty(json, "testarray", new Long[]{testarray}); + JsonTestUtils.checkProperty(json, "testsingle", testsingle); + } + + + private JsonObject testType(String type, Object testsingle, + Object[] testproperty, Object testarray) + throws AuthenticationException, ClientProtocolException, + IOException { + String resource = "/" + this.getClass().getName() + "/test" + type + + "Type" + System.currentTimeMillis(); + String resourceUrl = IntegrationServer.BASEURL + resource; + HttpPost post = new HttpPost(resourceUrl); + UsernamePasswordCredentials creds = new UsernamePasswordCredentials( + ADMIN_USER, ADMIN_PASSWORD); + post.addHeader(new BasicScheme().authenticate(creds, post)); + List v = Lists.newArrayList(); + v.add(new BasicNameValuePair("testsingle@" + type, String + .valueOf(testsingle))); + for (Object o : testproperty) { + v.add(new BasicNameValuePair("testproperty@" + type, String + .valueOf(o))); + } + v.add(new BasicNameValuePair("testarray[]@" + type, String + .valueOf(testarray))); + UrlEncodedFormEntity form = new UrlEncodedFormEntity(v); + post.setEntity(form); + post.setHeader("Referer","/integratriontest/"+this.getClass().getName()); + httpTestUtils.execute(post, 200, APPLICATION_JSON); + + JsonObject json = JsonTestUtils.toJsonObject(httpTestUtils.get( + resourceUrl + ".pp.json", 200, APPLICATION_JSON)); + JsonTestUtils.checkProperty(json, "_path", resource); + return json; + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/DefaultResourceTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/DefaultResourceTest.java new file mode 100644 index 00000000..92bd1389 --- /dev/null +++ 
b/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/DefaultResourceTest.java @@ -0,0 +1,158 @@ +package uk.co.tfd.sm.integration.resource; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Random; +import java.util.Set; + +import org.apache.commons.io.IOUtils; +import org.apache.http.HttpResponse; +import org.apache.http.auth.AuthenticationException; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.mime.MultipartEntity; +import org.apache.http.entity.mime.content.ByteArrayBody; +import org.apache.http.entity.mime.content.StringBody; +import org.apache.http.impl.auth.BasicScheme; +import org.apache.http.message.BasicNameValuePair; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import uk.co.tfd.sm.integration.HttpTestUtils; +import uk.co.tfd.sm.integration.IntegrationServer; +import uk.co.tfd.sm.integration.JsonTestUtils; + +import com.google.common.collect.Lists; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +public class DefaultResourceTest { + + private static final String ADMIN_USER = "admin"; + private static final String ADMIN_PASSWORD = "admin"; + private static final String APPLICATION_JSON = "application/json; charset=utf-8"; + private static final Charset UTF8 = Charset.forName("UTF-8"); + private Random random; + private HttpTestUtils httpTestUtils = new HttpTestUtils(); + + public DefaultResourceTest() throws IOException { + IntegrationServer.start(); + } + + @Before + public void before() throws IOException { + random = new Random(0xDEADBEEF); + } + + @Test + public void testPost() throws ClientProtocolException, IOException, + AuthenticationException { + String resource = "/" + this.getClass().getName() + "/testPost" + + 
System.currentTimeMillis(); + String resourceUrl = IntegrationServer.BASEURL + resource; + HttpPost post = new HttpPost(resourceUrl); + UrlEncodedFormEntity form = new UrlEncodedFormEntity( + Lists.newArrayList(new BasicNameValuePair("testproperty", + "testvalue"))); + post.setEntity(form); + + httpTestUtils.execute(post, 403, null); + + post = new HttpPost(resourceUrl); + UsernamePasswordCredentials creds = new UsernamePasswordCredentials( + ADMIN_USER, ADMIN_PASSWORD); + post.addHeader(new BasicScheme().authenticate(creds, post)); + form = new UrlEncodedFormEntity(Lists.newArrayList( + new BasicNameValuePair("testproperty", "testvalue1"), + new BasicNameValuePair("testproperty", "testvalue2"), + new BasicNameValuePair("testproperty", "testvalue3"), + new BasicNameValuePair("testproperty", "testvalue4"), + new BasicNameValuePair("testproperty", "testvalue5"), + new BasicNameValuePair("testint[]@Integer", "1001"))); + post.setEntity(form); + post.setHeader("Referer","/integratriontest/"+this.getClass().getName()); + httpTestUtils.execute(post, 200, APPLICATION_JSON); + + JsonObject json = JsonTestUtils.toJsonObject(httpTestUtils.get( + resourceUrl + ".pp.json", 200, APPLICATION_JSON)); + + JsonTestUtils.checkProperty(json, "testproperty", new String[] { + "testvalue1", "testvalue2", "testvalue3", "testvalue4", + "testvalue5" }); + + JsonTestUtils.checkProperty(json, "testint", new int[] { 1001 }); + + JsonTestUtils.checkProperty(json, "_path", resource); + } + + + @Test + public void testUpload() throws ClientProtocolException, IOException, + AuthenticationException { + String resource = "/" + this.getClass().getName() + "/testUpload" + + System.currentTimeMillis(); + String resourceUrl = IntegrationServer.BASEURL + resource; + HttpPost post = new HttpPost(resourceUrl); + MultipartEntity multipartEntity = new MultipartEntity(); + multipartEntity + .addPart("title", new StringBody("TestUploadFail", UTF8)); + multipartEntity.addPart("desc", new 
StringBody("TestUploadFail", UTF8)); + byte[] b = new byte[10240]; + random.nextBytes(b); + ByteArrayBody bab = new ByteArrayBody(b, "testUpload.bin", "test/bin"); + multipartEntity.addPart("fileX", bab); + + post.setEntity(multipartEntity); + + httpTestUtils.execute(post, 403, null); + + // do it again and authenticate + post = new HttpPost(resourceUrl); + multipartEntity = new MultipartEntity(); + multipartEntity.addPart("title", new StringBody("TestUploadPassTitle", + UTF8)); + multipartEntity.addPart("desc", new StringBody("TestUploadPass", UTF8)); + bab = new ByteArrayBody(b, "test/bin", "testUpload.bin"); + multipartEntity.addPart("fileA", bab); + + UsernamePasswordCredentials creds = new UsernamePasswordCredentials( + ADMIN_USER, ADMIN_PASSWORD); + post.addHeader(new BasicScheme().authenticate(creds, post)); + post.setEntity(multipartEntity); + post.setHeader("Referer","/integratriontest/"+this.getClass().getName()); + JsonElement jsonElement = httpTestUtils.execute(post, 200, + APPLICATION_JSON); + System.err.println(jsonElement); + Set responseSet = JsonTestUtils.toResponseSet(jsonElement); + + Assert.assertTrue(responseSet.contains("Multipart Upload")); + Assert.assertTrue(responseSet.contains("Added title")); + Assert.assertTrue(responseSet.contains("Added desc")); + Assert.assertTrue(responseSet.contains("Saved Stream testUpload.bin")); + + HttpResponse response = httpTestUtils.get(resourceUrl + "/testUpload.bin"); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + byte[] responseBody = IOUtils.toByteArray(response.getEntity() + .getContent()); + Assert.assertArrayEquals(b, responseBody); + + // subpaths that dont exist should give 404 on GET + httpTestUtils.get(resourceUrl + "/testUpload.bin/subpath.pp.json", 404, null); + + JsonObject fileProperties = JsonTestUtils.toJsonObject(httpTestUtils + .get(resourceUrl + "/testUpload.bin.pp.json", 200, APPLICATION_JSON)); + + // we should get the object we asked for + 
JsonTestUtils.checkProperty(fileProperties, "_path", resource + + "/testUpload.bin"); + + JsonTestUtils.checkProperty(fileProperties, "desc", "TestUploadPass"); + JsonTestUtils.checkProperty(fileProperties, "title", + "TestUploadPassTitle"); + + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/Utf8Test.java b/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/Utf8Test.java new file mode 100644 index 00000000..0617cba4 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/integration/resource/Utf8Test.java @@ -0,0 +1,95 @@ +package uk.co.tfd.sm.integration.resource; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.util.List; + +import org.apache.http.auth.AuthenticationException; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.auth.BasicScheme; +import org.apache.http.message.BasicNameValuePair; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.integration.HttpTestUtils; +import uk.co.tfd.sm.integration.IntegrationServer; +import uk.co.tfd.sm.integration.JsonTestUtils; + +import com.google.common.collect.Lists; +import com.google.gson.JsonObject; + +public class Utf8Test { + private static final String ADMIN_USER = "admin"; + private static final String ADMIN_PASSWORD = "admin"; + private static final String APPLICATION_JSON = "application/json; charset=utf-8"; + private static final Logger LOGGER = LoggerFactory + .getLogger(Utf8Test.class); + private HttpTestUtils httpTestUtils = new HttpTestUtils(); + private String resource; + private String resourceUrl; + + public Utf8Test() throws IOException { + IntegrationServer.start(); + resource = "/" + this.getClass().getName() + 
"/testUtfTest" + + System.currentTimeMillis(); + resourceUrl = IntegrationServer.BASEURL + resource; + } + + @Test + public void testUtf8() throws IOException, AuthenticationException { + BufferedReader br = new BufferedReader(new InputStreamReader(this + .getClass().getResourceAsStream("utf8.testpatterns"), "UTF-8")); + String line = br.readLine(); + int i = 0; + int low = Integer.MAX_VALUE; + int high = Integer.MIN_VALUE; + while (line != null) { + testUtf8Pattern(line, new String[] { line, line, line }, line); + i++; + for (int j = 0; j < line.length(); j++) { + int cp = Character.codePointAt(line, j); + high = Math.max(high, cp); + low = Math.min(low, cp); + } + LOGGER.info( + "Passed Test Pattern {} {} Code Point Range tested from 0x{} to 0x{}", + new Object[] { i, + line.substring(0, Math.min(line.length(), 20)), + Integer.toHexString(low), Integer.toHexString(high) }); + line = br.readLine(); + } + br.close(); + } + + private JsonObject testUtf8Pattern(String testsingle, + String[] testproperty, String testarray) + throws AuthenticationException, ClientProtocolException, + IOException { + HttpPost post = new HttpPost(resourceUrl); + UsernamePasswordCredentials creds = new UsernamePasswordCredentials( + ADMIN_USER, ADMIN_PASSWORD); + post.addHeader(new BasicScheme().authenticate(creds, post)); + List v = Lists.newArrayList(); + v.add(new BasicNameValuePair("testsingle", testsingle)); + for (String o : testproperty) { + v.add(new BasicNameValuePair("testproperty", o)); + } + v.add(new BasicNameValuePair("testarray[]@String", testarray)); + UrlEncodedFormEntity form = new UrlEncodedFormEntity(v); + post.setEntity(form); + post.setHeader("Referer","/integratriontest/"+this.getClass().getName()); + httpTestUtils.execute(post, 200, APPLICATION_JSON); + + JsonObject json = JsonTestUtils.toJsonObject(httpTestUtils.get( + resourceUrl + ".pp.json", 200, APPLICATION_JSON)); + JsonTestUtils.checkProperty(json, "_path", resource); + + return json; + } + +} diff --git 
a/extensions/resource/src/test/java/uk/co/tfd/sm/resource/BasicAuthenticationHanderTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/BasicAuthenticationHanderTest.java new file mode 100644 index 00000000..938eede2 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/BasicAuthenticationHanderTest.java @@ -0,0 +1,64 @@ +package uk.co.tfd.sm.resource; + +import java.io.UnsupportedEncodingException; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.codec.binary.Base64; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; + +import uk.co.tfd.sm.api.authn.AuthenticationServiceCredentials; +import uk.co.tfd.sm.authn.BasicAuthenticationHandler; + +public class BasicAuthenticationHanderTest { + + private HttpServletRequest request; + private BasicAuthenticationHandler bh; + + @Before + public void before() { + bh = new BasicAuthenticationHandler(); + request = Mockito.mock(HttpServletRequest.class); + } + + @Test + public void testNone() { + Mockito.when(request.getHeader("Authorization")).thenReturn(null); + Assert.assertNull(bh.getCredentials(request)); + } + + @Test + public void testInvalid() { + Mockito.when(request.getHeader("Authorization")).thenReturn("blasdflkj"); + Assert.assertNull(bh.getCredentials(request)); + } + + @Test + public void testInvalidNoPassword() throws UnsupportedEncodingException { + Mockito.when(request.getHeader("Authorization")).thenReturn("Basic "+Base64.encodeBase64URLSafeString("user:".getBytes("UTF-8"))); + AuthenticationServiceCredentials credentials = bh.getCredentials(request); + Assert.assertNull(credentials); + } + + @Test + public void testPassword() throws UnsupportedEncodingException { + Mockito.when(request.getHeader("Authorization")).thenReturn("Basic "+Base64.encodeBase64URLSafeString("user:password".getBytes("UTF-8"))); + AuthenticationServiceCredentials credentials = bh.getCredentials(request); + 
Assert.assertNotNull(credentials); + Assert.assertEquals("user", credentials.getUserName()); + Assert.assertEquals("password", credentials.getPassword()); + } + + @Test + public void testPasswordColon() throws UnsupportedEncodingException { + Mockito.when(request.getHeader("Authorization")).thenReturn("Basic "+Base64.encodeBase64URLSafeString("user:pas:sword".getBytes("UTF-8"))); + AuthenticationServiceCredentials credentials = bh.getCredentials(request); + Assert.assertNotNull(credentials); + Assert.assertEquals("user", credentials.getUserName()); + Assert.assertEquals("pas:sword", credentials.getPassword()); + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/resource/DefaultResourceHandlerTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/DefaultResourceHandlerTest.java new file mode 100644 index 00000000..7f58f6ea --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/DefaultResourceHandlerTest.java @@ -0,0 +1,125 @@ +package uk.co.tfd.sm.resource; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.InOrder; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.SparseSessionTracker; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; + +import uk.co.tfd.sm.authn.AuthenticationServiceImpl; + +public class DefaultResourceHandlerTest { + + @Mock + private Repository repository; + @Mock + private SparseSessionTracker sparseSessionTracker; + private DefaultResourceHandler 
defaultResourceHandler; + @Mock + private Session session; + @Mock + private ContentManager contentManager; + @Mock + private HttpServletRequest request; + @Mock + private HttpServletResponse response; + + public DefaultResourceHandlerTest() { + MockitoAnnotations.initMocks(this); + } + + @Before + public void setup() { + defaultResourceHandler = new DefaultResourceHandler(); + defaultResourceHandler.authenticationService = new AuthenticationServiceImpl(repository); + defaultResourceHandler.resourceFactory = new ResponseFactoryManagerImpl(); + defaultResourceHandler.sessionTracker = sparseSessionTracker; + } + + @Test + public void testResource() throws StorageClientException, AccessDeniedException { + String path = "/test/2/3/4.xxx.yyy.zz.json"; + Mockito.when(sparseSessionTracker.get(request)).thenReturn(null, session); + Mockito.when(repository.login()).thenReturn(session); + Mockito.when(sparseSessionTracker.register(session, request)).thenReturn(session); + Mockito.when(session.getContentManager()).thenReturn(contentManager); + Mockito.when(request.getMethod()).thenReturn("GET"); + Content content = new Content("/test/2/3/4", null); + Mockito.when(contentManager.get("/test/2/3/4")).thenReturn(content); + defaultResourceHandler.getResource(request, response, path); + InOrder order = Mockito.inOrder(contentManager); + order.verify(contentManager).get("/test/2/3/4.xxx.yyy.zz.json"); + order.verify(contentManager).get("/test/2/3/4.xxx.yyy.zz"); + order.verify(contentManager).get("/test/2/3/4.xxx.yyy"); + order.verify(contentManager).get("/test/2/3/4.xxx"); + order.verify(contentManager).get("/test/2/3/4"); + } + + + @Test + public void testBareResource() throws StorageClientException, AccessDeniedException { + String path = "/test/2/3/4"; + Mockito.when(sparseSessionTracker.get(request)).thenReturn(null, session); + Mockito.when(repository.login()).thenReturn(session); + Mockito.when(sparseSessionTracker.register(session, request)).thenReturn(session); + 
Mockito.when(session.getContentManager()).thenReturn(contentManager); + Mockito.when(request.getMethod()).thenReturn("GET"); + Content content = new Content("/test/2/3/4", null); + Mockito.when(contentManager.get("/test/2/3/4")).thenReturn(content); + defaultResourceHandler.getResource(request, response, path); + InOrder order = Mockito.inOrder(contentManager); + order.verify(contentManager).get("/test/2/3/4"); + } + @Test + public void testNoResource() throws StorageClientException, AccessDeniedException { + String path = "/testnon.existant/2/3/4.a.b.c"; + Mockito.when(sparseSessionTracker.get(request)).thenReturn(null, session); + Mockito.when(repository.login()).thenReturn(session); + Mockito.when(sparseSessionTracker.register(session, request)).thenReturn(session); + Mockito.when(session.getContentManager()).thenReturn(contentManager); + Mockito.when(request.getMethod()).thenReturn("GET"); + Content content = new Content("/test/2/3/4", null); + Mockito.when(contentManager.get("/test/2/3/4")).thenReturn(content); + defaultResourceHandler.getResource(request, response, path); + InOrder order = Mockito.inOrder(contentManager); + order.verify(contentManager).get("/testnon.existant/2/3/4.a.b.c"); + order.verify(contentManager).get("/testnon.existant/2/3/4.a.b"); + order.verify(contentManager).get("/testnon.existant/2/3/4.a"); + order.verify(contentManager).get("/testnon.existant/2/3/4"); + order.verify(contentManager).get("/testnon.existant/2/3"); + order.verify(contentManager).get("/testnon.existant/2"); + order.verify(contentManager).get("/testnon.existant"); + } + + @Test + public void testFolder() throws StorageClientException, AccessDeniedException { + String path = "/test/2/3/4.xxx.yyy.zz.json"; + Mockito.when(sparseSessionTracker.get(request)).thenReturn(null, session); + Mockito.when(repository.login()).thenReturn(session); + Mockito.when(sparseSessionTracker.register(session, request)).thenReturn(session); + 
Mockito.when(session.getContentManager()).thenReturn(contentManager); + Mockito.when(request.getMethod()).thenReturn("GET"); + Content content = new Content("/test/2/3", null); + Mockito.when(contentManager.get("/test/2/3")).thenReturn(content); + defaultResourceHandler.getResource(request, response, path); + InOrder order = Mockito.inOrder(contentManager); + order.verify(contentManager).get("/test/2/3/4.xxx.yyy.zz.json"); + order.verify(contentManager).get("/test/2/3/4.xxx.yyy.zz"); + order.verify(contentManager).get("/test/2/3/4.xxx.yyy"); + order.verify(contentManager).get("/test/2/3/4.xxx"); + order.verify(contentManager).get("/test/2/3/4"); + order.verify(contentManager).get("/test/2/3"); + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/resource/DefaultResponseFactoryTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/DefaultResponseFactoryTest.java new file mode 100644 index 00000000..987f7111 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/DefaultResponseFactoryTest.java @@ -0,0 +1,264 @@ +package uk.co.tfd.sm.resource; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Collections; +import java.util.Date; +import java.util.Iterator; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.StreamingOutput; + +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import 
org.sakaiproject.nakamura.lite.BaseMemoryRepository; + +import uk.co.tfd.sm.api.resource.Adaptable; +import uk.co.tfd.sm.api.resource.Resource; +import uk.co.tfd.sm.api.resource.ResponseFactory; +import uk.co.tfd.sm.api.resource.binding.ResponseBindingList; +import uk.co.tfd.sm.api.resource.binding.RuntimeResponseBinding; +import uk.co.tfd.sm.util.http.ParameterUtil; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; + +public class DefaultResponseFactoryTest { + + @Mock + private Adaptable adaptable; + @Mock + private Resource resource; + @Mock + private Session session; + @Mock + private ContentManager contentManager; + @Mock + private Content content; + + public DefaultResponseFactoryTest() { + MockitoAnnotations.initMocks(this); + } + + @Test + public void testGetBindings() { + DefaultResponseFactory fac = new DefaultResponseFactory(); + ResponseBindingList bl = fac.getBindings(); + Assert.assertNotNull(bl); + Iterator i = bl.iterator(); + int n = 0; + while (i.hasNext()) { + n++; + RuntimeResponseBinding b = i.next(); + Assert.assertNotNull(b); + } + Assert.assertTrue(n > 0); + } + + @Test + public void testGetResource() throws IOException, StorageClientException, AccessDeniedException { + DefaultResponseFactory fac = new DefaultResponseFactory(); + Adaptable gr = fac.getResponse(adaptable); + Assert.assertNotNull(gr); + DefaultResponse dg = (DefaultResponse) gr; + Mockito.when(adaptable.adaptTo(Resource.class)).thenReturn(resource); + Mockito.when(adaptable.adaptTo(Session.class)).thenReturn(session); + Mockito.when(adaptable.adaptTo(Content.class)).thenReturn(content); + Mockito.when(content.getPath()).thenReturn("/test/path"); + Mockito.when(resource.getToCreatePath()).thenReturn("/test/path"); + Mockito.when(adaptable.adaptTo(Date.class)).thenReturn(new Date()); + Mockito.when(resource.getRequestExt()).thenReturn(null); + Mockito.when(session.getContentManager()).thenReturn(contentManager); + byte[] data = 
"TEST".getBytes("UTF-8"); + InputStream in = new ByteArrayInputStream(data); + Mockito.when(contentManager.getInputStream(Mockito.anyString())).thenReturn(in); + + + Response response = dg.doGet(); + Object o = response.getEntity(); + int status = response.getStatus(); + Assert.assertEquals(200, status); + StreamingOutput out = (StreamingOutput) o; + ByteArrayOutputStream output = new ByteArrayOutputStream(); + out.write(output); + byte[] outputData = output.toByteArray(); + Assert.assertArrayEquals(data, outputData); + + } + + @Test + public void testGetResourceJson() throws IOException, StorageClientException, AccessDeniedException { + DefaultResponseFactory fac = new DefaultResponseFactory(); + Adaptable gr = fac.getResponse(adaptable); + Assert.assertNotNull(gr); + DefaultResponse dg = (DefaultResponse) gr; + Mockito.when(adaptable.adaptTo(Resource.class)).thenReturn(resource); + Mockito.when(adaptable.adaptTo(Session.class)).thenReturn(session); + Mockito.when(adaptable.adaptTo(Content.class)).thenReturn(content); + Mockito.when(content.getPath()).thenReturn("/test/path"); + Mockito.when(resource.getToCreatePath()).thenReturn("/test/path"); + Mockito.when(adaptable.adaptTo(Date.class)).thenReturn(new Date()); + Mockito.when(resource.getRequestExt()).thenReturn("json"); + Mockito.when(session.getContentManager()).thenReturn(contentManager); + Mockito.when(content.getProperties()).thenReturn(ImmutableMap.of("key", (Object)"value")); + Response response = dg.doGet(); + Object o = response.getEntity(); + Assert.assertEquals("application/json; charset=utf-8",response.getMetadata().get("Content-Type").get(0).toString()); + int status = response.getStatus(); + Assert.assertEquals(200, status); + StreamingOutput out = (StreamingOutput) o; + ByteArrayOutputStream output = new ByteArrayOutputStream(); + out.write(output); + String outputData = new String(output.toByteArray(), "UTF-8"); + Assert.assertEquals("{\"key\":\"value\"}", outputData); + } + @Test + public void 
testGetResourceXml() throws IOException, StorageClientException, AccessDeniedException { + DefaultResponseFactory fac = new DefaultResponseFactory(); + Adaptable gr = fac.getResponse(adaptable); + Assert.assertNotNull(gr); + DefaultResponse dg = (DefaultResponse) gr; + Mockito.when(adaptable.adaptTo(Resource.class)).thenReturn(resource); + Mockito.when(adaptable.adaptTo(Session.class)).thenReturn(session); + Mockito.when(adaptable.adaptTo(Content.class)).thenReturn(content); + Mockito.when(content.getPath()).thenReturn("/test/path"); + Mockito.when(resource.getToCreatePath()).thenReturn("/test/path"); + + Mockito.when(adaptable.adaptTo(Date.class)).thenReturn(new Date()); + Mockito.when(resource.getRequestExt()).thenReturn("xml"); + Mockito.when(session.getContentManager()).thenReturn(contentManager); + Mockito.when(content.getProperties()).thenReturn(ImmutableMap.of("key", (Object)"value")); + Response response = dg.doGet(); + Assert.assertEquals("application/xml; charset=utf-8",response.getMetadata().get("Content-Type").get(0).toString()); + int status = response.getStatus(); + Assert.assertEquals(200, status); + } + + @Test + public void testGetResourceInvalid() throws IOException, StorageClientException, AccessDeniedException { + DefaultResponseFactory fac = new DefaultResponseFactory(); + Adaptable gr = fac.getResponse(adaptable); + Assert.assertNotNull(gr); + DefaultResponse dg = (DefaultResponse) gr; + Mockito.when(adaptable.adaptTo(Resource.class)).thenReturn(resource); + Mockito.when(adaptable.adaptTo(Session.class)).thenReturn(session); + Mockito.when(adaptable.adaptTo(Content.class)).thenReturn(content); + Mockito.when(content.getPath()).thenReturn("/test/path"); + Mockito.when(resource.getToCreatePath()).thenReturn("/test/path"); + Mockito.when(adaptable.adaptTo(Date.class)).thenReturn(new Date()); + Mockito.when(resource.getRequestExt()).thenReturn("invalid"); + Mockito.when(session.getContentManager()).thenReturn(contentManager); + 
Mockito.when(content.getProperties()).thenReturn(ImmutableMap.of("key", (Object)"value")); + Response response = dg.doGet(); + int status = response.getStatus(); + Assert.assertEquals(400, status); + } + + @Test + public void testGetResourceDenied() throws IOException, StorageClientException, AccessDeniedException { + DefaultResponseFactory fac = new DefaultResponseFactory(); + Adaptable gr = fac.getResponse(adaptable); + Assert.assertNotNull(gr); + DefaultResponse dg = (DefaultResponse) gr; + Mockito.when(adaptable.adaptTo(Resource.class)).thenReturn(resource); + Mockito.when(adaptable.adaptTo(Session.class)).thenReturn(session); + Mockito.when(adaptable.adaptTo(Content.class)).thenReturn(content); + Mockito.when(content.getPath()).thenReturn("/test/path"); + Mockito.when(resource.getToCreatePath()).thenReturn("/test/path"); + Mockito.when(adaptable.adaptTo(Date.class)).thenReturn(new Date()); + Mockito.when(resource.getRequestExt()).thenReturn(null); + Mockito.when(session.getContentManager()).thenReturn(contentManager); + Mockito.when(contentManager.getInputStream(Mockito.anyString())).thenThrow(new AccessDeniedException("test","test","test","test")); + Mockito.when(content.getProperties()).thenReturn(ImmutableMap.of("key", (Object)"value")); + Response response = dg.doGet(); + int status = response.getStatus(); + Assert.assertEquals(403, status); + } + + @Test + public void testGetResourceError() throws IOException, StorageClientException, AccessDeniedException { + DefaultResponseFactory fac = new DefaultResponseFactory(); + Adaptable gr = fac.getResponse(adaptable); + Assert.assertNotNull(gr); + DefaultResponse dg = (DefaultResponse) gr; + Mockito.when(adaptable.adaptTo(Resource.class)).thenReturn(resource); + Mockito.when(adaptable.adaptTo(Session.class)).thenReturn(session); + Mockito.when(adaptable.adaptTo(Content.class)).thenReturn(content); + Mockito.when(content.getPath()).thenReturn("/test/path"); + 
Mockito.when(resource.getToCreatePath()).thenReturn("/test/path"); + Mockito.when(adaptable.adaptTo(Date.class)).thenReturn(new Date()); + Mockito.when(resource.getRequestExt()).thenReturn(null); + Mockito.when(session.getContentManager()).thenReturn(contentManager); + Mockito.when(contentManager.getInputStream(Mockito.anyString())).thenThrow(new StorageClientException("test")); + Mockito.when(content.getProperties()).thenReturn(ImmutableMap.of("key", (Object)"value")); + Response response = dg.doGet(); + int status = response.getStatus(); + Assert.assertEquals(500, status); + } + + @Test + public void testSort() throws IOException, StorageClientException, AccessDeniedException { + List factories = Lists.newArrayList( + (ResponseFactory)new DefaultResponseFactory(), + new DefaultResponseFactory()); + Collections.sort(factories); + } + + + @Test + public void testPostResource() throws IOException, StorageClientException, AccessDeniedException, ClassNotFoundException { + BaseMemoryRepository repository = new BaseMemoryRepository(); + Session adminSession = repository.getRepository().loginAdministrative(); + + ContentManager contentManager = adminSession.getContentManager(); + contentManager.update(new Content("/test/path",null)); + Content realContent = contentManager.get("/test/path"); + + DefaultResponseFactory fac = new DefaultResponseFactory(); + Adaptable gr = fac.getResponse(adaptable); + Assert.assertNotNull(gr); + DefaultResponse dg = (DefaultResponse) gr; + Mockito.when(adaptable.adaptTo(Resource.class)).thenReturn(resource); + Mockito.when(adaptable.adaptTo(Session.class)).thenReturn(adminSession); + Mockito.when(adaptable.adaptTo(Content.class)).thenReturn(realContent); + Mockito.when(resource.getToCreatePath()).thenReturn("/test/path"); + Mockito.when(adaptable.adaptTo(Date.class)).thenReturn(new Date()); + Mockito.when(resource.getRequestExt()).thenReturn(null); + + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + 
Mockito.when(adaptable.adaptTo(HttpServletRequest.class)).thenReturn(request); + Mockito.when(request.getMethod()).thenReturn("POST"); + Mockito.when(request.getParameterMap()).thenReturn(ParameterUtil.getParameters()); + + + Response response = dg.doPost(); + Object o = response.getEntity(); + int status = response.getStatus(); + Assert.assertEquals(200, status); + StreamingOutput out = (StreamingOutput) o; + ByteArrayOutputStream output = new ByteArrayOutputStream(); + out.write(output); + String outputData = new String(output.toByteArray(),"UTF-8"); + ParameterUtil.checkResponse(outputData); + + Content finalContent = contentManager.get("/test/path"); + ParameterUtil.testProperties(finalContent.getProperties()); + + + + } + + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/resource/ResourceImplTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/ResourceImplTest.java new file mode 100644 index 00000000..c702acc1 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/ResourceImplTest.java @@ -0,0 +1,64 @@ +package uk.co.tfd.sm.resource; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.MediaType; + +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.content.Content; + +import uk.co.tfd.sm.api.resource.Adaptable; + +public class ResourceImplTest { + + @Test + public void test() { + Content content = new Content("/test/1/2/3", null); + Session session = Mockito.mock(Session.class); + HttpServletResponse response = Mockito.mock(HttpServletResponse.class); + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + Adaptable parent = Mockito.mock(Adaptable.class); + // simple cases + ResourceImpl resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", 
"/test/1/2/3/4/5/6.xxx.yyy.z", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/4/5/6.xxx.yyy.z","z","6","/test/1/2/3/4/5/6.xxx.yyy.z",new String[]{"xxx","yyy"},"/test/1/2/3",MediaType.APPLICATION_OCTET_STREAM); + resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", "/test/1/2/3/4/5/6.xxx.z", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/4/5/6.xxx.z","z","6","/test/1/2/3/4/5/6.xxx.z",new String[]{"xxx"},"/test/1/2/3",MediaType.APPLICATION_OCTET_STREAM); + resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", "/test/1/2/3/4/5/6.z", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/4/5/6.z","z","6","/test/1/2/3/4/5/6.z",new String[]{},"/test/1/2/3",MediaType.APPLICATION_OCTET_STREAM); + resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", "/test/1/2/3/4/5/6", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/4/5/6","","6","/test/1/2/3/4/5/6",new String[]{},"/test/1/2/3",MediaType.APPLICATION_OCTET_STREAM); + // harder cases + resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", "/test/1/2/3/4/5/6.xxx.yyy..z", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/4/5/6.xxx.yyy..z","z","6","/test/1/2/3/4/5/6.xxx.yyy..z",new String[]{"xxx","yyy"},"/test/1/2/3",MediaType.APPLICATION_OCTET_STREAM); + content.setProperty(Content.LASTMODIFIED_FIELD, System.currentTimeMillis()); + content.setProperty(Content.MIMETYPE_FIELD,"text/html"); + resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", "/test/1/2/3/4/5/6...xxx.z", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/4/5/6...xxx.z","z","6","/test/1/2/3/4/5/6...xxx.z",new String[]{"xxx"},"/test/1/2/3","text/html"); + resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", "/test/1/2/3/4/5/6.z.", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/4/5/6.z.","z","6","/test/1/2/3/4/5/6.z.",new String[]{},"/test/1/2/3","text/html"); + 
resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", "/test/1/2/3/4/5/6", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/4/5/6","","6","/test/1/2/3/4/5/6",new String[]{},"/test/1/2/3","text/html"); + + content.setProperty(Content.RESOURCE_TYPE_FIELD,"sparse/content"); + + resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3", "/test/1/2/3/", "/test/1/2/3/4/5/6"); + check(resourceImpl,"/","","","/test/1/2/3/",new String[]{},"/test/1/2/3","sparse/content"); + resourceImpl = new ResourceImpl(parent, request, response, session, content, "/test/1/2/3/", "/test/1/2/3/abc", "/test/1/2/3/4/5/6"); + check(resourceImpl,"abc","","abc","/test/1/2/3/abc",new String[]{},"/test/1/2/3/","sparse/content"); + + + } + + private void check(ResourceImpl resourceImpl, String pathInfo, String extensions, String requestName, String requestPath, String[] requestSelectors, String resolvedPath, String resourceType) { + Assert.assertEquals(pathInfo, resourceImpl.getPathInfo()); + Assert.assertEquals(extensions, resourceImpl.getRequestExt()); + Assert.assertEquals(requestName, resourceImpl.getRequestName()); + Assert.assertEquals(requestPath, resourceImpl.getRequestPath()); + Assert.assertArrayEquals(requestSelectors, resourceImpl.getRequestSelectors()); + Assert.assertEquals(resolvedPath, resourceImpl.getResolvedPath()); + Assert.assertEquals(resourceType, resourceImpl.getResourceType()); + } +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/resource/ResponseFactoryManagerImplTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/ResponseFactoryManagerImplTest.java new file mode 100644 index 00000000..1baf65b2 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/resource/ResponseFactoryManagerImplTest.java @@ -0,0 +1,144 @@ +package uk.co.tfd.sm.resource; + +import java.util.List; +import java.util.Random; + +import junit.framework.Assert; + +import org.junit.Test; +import 
org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.Lists; + +import uk.co.tfd.sm.api.resource.Adaptable; +import uk.co.tfd.sm.api.resource.ResponseFactory; +import uk.co.tfd.sm.api.resource.binding.ResponseBindingList; +import uk.co.tfd.sm.api.resource.binding.RuntimeResponseBinding; + +public class ResponseFactoryManagerImplTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(ResponseFactoryManagerImplTest.class); + + @Test + public void testBind() { + ResponseFactoryManagerImpl r = new ResponseFactoryManagerImpl(); + + ResponseFactory rf1 = new DefaultResponseFactory(); + ResponseFactory rf2 = Mockito.mock(ResponseFactory.class); + ResponseFactory rf3 = Mockito.mock(ResponseFactory.class); + Mockito.when(rf2.getBindings()).thenReturn( new ResponseBindingList( + new RuntimeResponseBinding("GET",null,null,null), + new RuntimeResponseBinding("GET","test/type",null,null), + new RuntimeResponseBinding("POST","test/type","create",null), + new RuntimeResponseBinding("GET","test/type","all","json"), + new RuntimeResponseBinding("GET","test/type2",null,null) + )); + Mockito.when(rf3.getBindings()).thenReturn( new ResponseBindingList( + new RuntimeResponseBinding("GET",null,null,null), + new RuntimeResponseBinding("GET","test/type",null,null), + new RuntimeResponseBinding("POST","test/type","create",null), + new RuntimeResponseBinding("GET","test/type","all","json"), + new RuntimeResponseBinding("GET","test/type3",null,null) + )); + r.bind(rf1); + r.bind(rf2); + r.bind(rf3); + r.unbind(rf1); + r.unbind(rf3); + r.unbind(rf2); + } + + @Test + public void testDefaultGetResponse() { + ResponseFactoryManagerImpl r = new ResponseFactoryManagerImpl(); + Adaptable resource = Mockito.mock(Adaptable.class); + Mockito.when(resource.adaptTo(ResponseBindingList.class)).thenReturn(new ResponseBindingList(new RuntimeResponseBinding("GET","test/type",null,"json"))); + Adaptable response = 
r.createResponse(resource); + Assert.assertNotNull(response); + Assert.assertTrue(response instanceof DefaultResponse); + } + + @Test + public void testGetResponse() { + ResponseFactoryManagerImpl r = new ResponseFactoryManagerImpl(); + ResponseFactory rf1 = Mockito.mock(ResponseFactory.class); + Mockito.when(rf1.getBindings()).thenReturn( new ResponseBindingList( + new RuntimeResponseBinding("GET",null,null,null), + new RuntimeResponseBinding("GET","test/type",null,null), + new RuntimeResponseBinding("POST","test/type","create",null), + new RuntimeResponseBinding("GET","test/type","all","json"), + new RuntimeResponseBinding("GET","test/type2",null,null) + )); + r.bind(rf1); + + Adaptable resource = Mockito.mock(Adaptable.class); + Mockito.when(resource.adaptTo(ResponseBindingList.class)).thenReturn( + new ResponseBindingList(new RuntimeResponseBinding("POST","test/type","create","json"))); + Adaptable rf1response = Mockito.mock(Adaptable.class); + Mockito.when(rf1.getResponse(resource)).thenReturn(rf1response); + + + + Adaptable response = r.createResponse(resource); + Assert.assertNotNull(response); + Assert.assertFalse(response instanceof DefaultResponse); + } + + @Test + public void testScalingGetResponse() { + ResponseFactoryManagerImpl rm = new ResponseFactoryManagerImpl(); + Random r = new Random(); + String[] methods = populate(r,4); + String[] types = populate(r,100); + String[] selectors = populate(r,100); + String[] extensions = populate(r,2); + int nbindings = 500; + long s = System.currentTimeMillis(); + for ( int i = 0; i < nbindings; i++) { + int l = r.nextInt(4); + List rl = Lists.newArrayList(); + for ( int k = 0; k < l; k++) { + int im = r.nextInt(methods.length); + int it = r.nextInt(types.length); + int is = r.nextInt(selectors.length); + int ie = r.nextInt(extensions.length); + rl.add(new RuntimeResponseBinding(methods[im], types[it], selectors[is], extensions[ie])); + } + ResponseFactory rf1 = Mockito.mock(ResponseFactory.class); + 
Mockito.when(rf1.getBindings()).thenReturn( new ResponseBindingList(rl.toArray(new RuntimeResponseBinding[rl.size()]))); + rm.bind(rf1); + } + long e = System.currentTimeMillis(); + LOGGER.info("Average Add time for {} bindings is {} ms",nbindings,(((double)(e-s))/(double)nbindings)); + + s = System.currentTimeMillis(); + int ntest = 1000; + Adaptable resource = Mockito.mock(Adaptable.class); + for ( int i = 0; i < ntest; i++ ) { + int im = r.nextInt(methods.length-1)+1; + int it = r.nextInt(types.length-1)+1; + int is = r.nextInt(selectors.length-1)+1; + int ie = r.nextInt(extensions.length-1)+1; + Mockito.when(resource.adaptTo(ResponseBindingList.class)).thenReturn( + new ResponseBindingList(new RuntimeResponseBinding(methods[im], types[it], selectors[is], extensions[ie]))); + @SuppressWarnings("unused") + Adaptable response = rm.createResponse(resource); + } + e = System.currentTimeMillis(); + LOGGER.info("Average Resolution time for {} bindings is {} ms",nbindings,(((double)(e-s))/(double)ntest)); + + } + + private String[] populate(Random r, int i) { + String[] s = new String[i+2]; + s[0] = BindingSearchKey.NONE; + s[1] = BindingSearchKey.ANY; + for ( int k = 2; k < s.length; k++ ) { + s[k] = String.valueOf(r.nextInt()); + } + return s; + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/util/gson/adapters/ContentTypeAdapterTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/util/gson/adapters/ContentTypeAdapterTest.java new file mode 100644 index 00000000..66638b4b --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/util/gson/adapters/ContentTypeAdapterTest.java @@ -0,0 +1,59 @@ +package uk.co.tfd.sm.util.gson.adapters; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.List; + +import junit.framework.Assert; + +import org.junit.Test; +import org.mockito.Mockito; +import org.sakaiproject.nakamura.api.lite.content.Content; +import 
org.sakaiproject.nakamura.api.lite.util.ISO8601Date; + +import uk.co.tfd.sm.util.http.ResponseUtils; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; + +public class ContentTypeAdapterTest { + + + private static final String TEST_PATTERN = "{\n" + + " \"prop1\": \"2011-02-01T00:00:00Z\",\n" + + " \"1\": {\n" + + " \"prop2\": \"2011-02-02T00:00:00Z\"\n" + + " },\n" + + " \"2\": {\n" + + " \"prop3\": \"2011-02-03T00:00:00Z\"\n" + + " }\n" + + "}"; + + + @Test + public void test() throws UnsupportedEncodingException, IOException { + Content content = Mockito.mock(Content.class); + Mockito.when(content.getPath()).thenReturn("/path/a/b/c"); + Content contentChild1 = Mockito.mock(Content.class); + Content contentChild2 = Mockito.mock(Content.class); + Mockito.when(contentChild1.getPath()).thenReturn("/path/a/b/c/1"); + Mockito.when(contentChild2.getPath()).thenReturn("/path/a/b/c/2"); + Mockito.when(content.getProperties()).thenReturn( ImmutableMap.of("prop1", (Object)new ISO8601Date("20110201T000000Z"))); + Mockito.when(contentChild1.getProperties()).thenReturn(ImmutableMap.of("prop2", (Object)new ISO8601Date("20110202T000000Z"))); + Mockito.when(contentChild2.getProperties()).thenReturn(ImmutableMap.of("prop3", (Object)new ISO8601Date("20110203T000000Z"))); + + + List children = Lists.newArrayList(contentChild1, contentChild2); + Mockito.when(content.listChildren()).thenReturn(children); + List nochildren = ImmutableList.of(); + Mockito.when(contentChild1.listChildren()).thenReturn(nochildren); + Mockito.when(contentChild2.listChildren()).thenReturn(nochildren); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + ResponseUtils.writeTree(content, new String[]{"-1","pp"}, baos); + String result = baos.toString("UTF-8"); + Assert.assertEquals(TEST_PATTERN, result); + + } +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ModificationRequestTest.java 
b/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ModificationRequestTest.java new file mode 100644 index 00000000..07d29705 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ModificationRequestTest.java @@ -0,0 +1,154 @@ +package uk.co.tfd.sm.util.http; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.util.List; +import java.util.Map; + +import javax.servlet.ServletInputStream; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.fileupload.FileUploadException; +import org.apache.commons.io.IOUtils; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; +import org.sakaiproject.nakamura.api.lite.authorizable.AuthorizableManager; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import org.sakaiproject.nakamura.lite.BaseMemoryRepository; + +import com.google.common.collect.Lists; + +public class ModificationRequestTest { + + protected static final String BOUNDARY = "mimeboundary1"; + + @Test + public void test() throws IOException, FileUploadException, + StorageClientException, AccessDeniedException { + ModificationRequest m = new ModificationRequest(); + + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + Mockito.when(request.getMethod()).thenReturn("POST"); + + + Map parameters = ParameterUtil.getParameters(); + + Mockito.when(request.getParameterMap()).thenReturn(parameters); + m.processRequest(request); + + ParameterUtil.testParameters(m); + + } + + 
+ + + @Test + public void testStreamContent() throws IOException, FileUploadException, + StorageClientException, AccessDeniedException, ClassNotFoundException { + BaseMemoryRepository memoryRepository = new BaseMemoryRepository(); + Session adminSession = memoryRepository.getRepository().loginAdministrative(); + ContentManager contentManager = adminSession.getContentManager(); + ContentHelper contentHelper = new ContentHelper(contentManager); + + String path = "/test/content"; + contentManager.update(new Content(path,null)); + Content content = contentManager.get(path); + ContentRequestStreamProcessor contentRequestStreamProcessor = new ContentRequestStreamProcessor(content, contentManager, contentHelper); + + ModificationRequest m = new ModificationRequest(contentRequestStreamProcessor); + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + Mockito.when(request.getMethod()).thenReturn("POST"); + Mockito.when(request.getContentType()).thenReturn("multipart/form-data; boundary="+BOUNDARY); + List> parts = Lists.newArrayList(); + parts.add(Lists.newArrayList("Content-Disposition: form-data; name=\"a\"","value_for_a")); + parts.add(Lists.newArrayList("Content-Disposition: form-data; name=\"b\"","value_for_b")); + parts.add(Lists.newArrayList("Content-Disposition: form-data; name=\"c\"; filename=\"testfile.txt\" ","Content-Type: text/plain","A very short file")); + parts.add(Lists.newArrayList("Content-Disposition: form-data; name=\"a\"","value_for_a2")); + parts.add(Lists.newArrayList("Content-Disposition: form-data; name=\"b\"","value_for_b2")); + parts.add(Lists.newArrayList("Content-Disposition: form-data; name=\"b\"","value_for_b3")); + final InputStream postInputStream = getMultipatStream(BOUNDARY,parts); + + ServletInputStream inputStream = new ServletInputStream() { + @Override + public int read() throws IOException { + return postInputStream.read(); + } + }; + Mockito.when(request.getInputStream()).thenReturn(inputStream); + 
m.processRequest(request); + contentHelper.applyProperties(content, m); + contentHelper.save(); + + List feedback = m.getFeedback(); + Assert.assertArrayEquals(new String[]{"Multipart Upload","Added a","Added b","Saved Stream testfile.txt","Added a","Added b","Added b"}, feedback.toArray()); + + Content c = contentManager.get(path); + Assert.assertNotNull(c); + Assert.assertEquals("value_for_a2", c.getProperty("a")); + Assert.assertArrayEquals(new String[]{"value_for_b2","value_for_b3"}, (Object[]) c.getProperty("b")); + c = contentManager.get(path+"/testfile.txt"); + Assert.assertNotNull(c); + Assert.assertEquals("value_for_a", c.getProperty("a")); + Assert.assertEquals("value_for_b", c.getProperty("b")); + Assert.assertEquals("text/plain", c.getProperty(Content.MIMETYPE_FIELD)); + Assert.assertEquals("A very short file", IOUtils.toString(contentManager.getInputStream(path+"/testfile.txt"),"UTF-8")); + + } + + + @Test + public void testUpdateAuthorizable() throws ClientPoolException, StorageClientException, AccessDeniedException, ClassNotFoundException, IOException, FileUploadException { + BaseMemoryRepository memoryRepository = new BaseMemoryRepository(); + Session adminSession = memoryRepository.getRepository().loginAdministrative(); + AuthorizableManager authorizableManager = adminSession.getAuthorizableManager(); + AuthorizableHelper authorizableHelper = new AuthorizableHelper(authorizableManager); + + Authorizable a = authorizableHelper.getOrCreateAuthorizable("testuser", "user"); + ModificationRequest modificationRequest = new ModificationRequest(null); + + Map parameters = ParameterUtil.getParameters(); + + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + Mockito.when(request.getMethod()).thenReturn("POST"); + Mockito.when(request.getParameterMap()).thenReturn(parameters); + + modificationRequest.processRequest(request); + + ParameterUtil.testParameters(modificationRequest); + + authorizableHelper.applyProperties(a, 
modificationRequest); + authorizableHelper.save(); + + Authorizable authorizable = authorizableManager.findAuthorizable("testuser"); + + ParameterUtil.testProperties(authorizable.getProperties()); + + + } + + protected InputStream getMultipatStream(String boundary, List> parts) throws UnsupportedEncodingException { + StringBuilder sb = new StringBuilder(); + sb.append("preeamble"); + for ( List part : parts) { + sb.append("\r\n--").append(boundary).append("\r\n"); + for ( int i = 0; i < part.size()-1; i++) { + sb.append(part.get(i)).append("\r\n"); + } + sb.append("\r\n"); + sb.append(part.get(part.size()-1)); + } + sb.append("\r\n--").append(boundary).append("--\r\n"); + return new ByteArrayInputStream(sb.toString().getBytes("UTF-8")); + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ParameterUtil.java b/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ParameterUtil.java new file mode 100644 index 00000000..f9185893 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ParameterUtil.java @@ -0,0 +1,70 @@ +package uk.co.tfd.sm.util.http; + +import java.util.List; +import java.util.Map; + +import org.junit.Assert; + +import com.google.common.collect.Maps; + +public class ParameterUtil { + public static void testParameters(ModificationRequest m) { + Map toAdd = m.getParameterSet(ParameterType.ADD); + Map operation = m + .getParameterSet(ParameterType.OPERATION); + Map remove = m.getParameterSet(ParameterType.REMOVE); + Map special = m.getParameterSet(ParameterType.SPECIAL); + Assert.assertEquals(3, toAdd.size()); + Assert.assertEquals(1, operation.size()); + Assert.assertEquals(1, remove.size()); + Assert.assertEquals(2, special.size()); + Assert.assertEquals("testString", toAdd.get("test")); + Assert.assertArrayEquals(new String[] { "testArray" }, + (Object[]) toAdd.get("testArray")); + Assert.assertArrayEquals(new String[] { "testArraySet", "testArraySet", + "testArraySet" }, (Object[]) 
toAdd.get("testArraySet")); + Assert.assertArrayEquals(new String[] { "testArraySet", "testArraySet", + "testArraySet" }, (Object[]) remove.get("testDelete")); + Assert.assertArrayEquals(new String[] { "Operation4", "Operation21", + "Operation2" }, (Object[]) operation.get("testOperation")); + Assert.assertArrayEquals(new String[] { "Special3", "Special2", + "Special1" }, (Object[]) special.get("testSpecial")); + Assert.assertArrayEquals(new String[] { "Special21", "Special22", + "Special3" }, (Object[]) special.get("testSpecial2")); + List feedback = m.getFeedback(); + Assert.assertArrayEquals(new String[] { "Added test", + "Added testArray", "Added testArraySet", "Removed testDelete", + "Added testOperation", "Added testSpecial", + "Added testSpecial2" }, feedback.toArray()); + } + + public static Map getParameters() { + Map parameters = Maps.newLinkedHashMap(); + parameters.put("test", new String[] { "testString" }); + parameters.put("testArray[]", new String[] { "testArray" }); + parameters.put("testArraySet", new String[] { "testArraySet", + "testArraySet", "testArraySet" }); + parameters.put("testDelete@Delete", new String[] { "testArraySet", + "testArraySet", "testArraySet" }); + parameters.put(":testOperation", new String[] { "Operation4", + "Operation21", "Operation2" }); + parameters.put("testSpecial:", new String[] { "Special3", "Special2", + "Special1" }); + parameters.put("testSpecial2:", new String[] { "Special21", + "Special22", "Special3" }); + return parameters; + } + + public static void testProperties(Map properties) { + Assert.assertEquals("testString", properties.get("test")); + Assert.assertArrayEquals(new String[] { "testArray" }, + (Object[]) properties.get("testArray")); + Assert.assertArrayEquals(new String[] { "testArraySet", "testArraySet", + "testArraySet" }, (Object[]) properties.get("testArraySet")); + } + + public static void checkResponse(String outputData) { + Assert.assertEquals("[\n \"Added test\",\n \"Added testArray\",\n 
\"Added testArraySet\",\n \"Removed testDelete\",\n \"Added testOperation\",\n \"Added testSpecial\",\n \"Added testSpecial2\"\n]", outputData); + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/RequestUtilsTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/RequestUtilsTest.java new file mode 100644 index 00000000..ec1f1c0c --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/RequestUtilsTest.java @@ -0,0 +1,102 @@ +package uk.co.tfd.sm.util.http; + +import java.util.Calendar; + +import org.junit.Assert; +import org.junit.Test; + +public class RequestUtilsTest { + + @Test + public void testGetFileName() { + Assert.assertEquals("testname.bin",RequestUtils.getFileName("testname.bin")); + Assert.assertEquals("testname",RequestUtils.getFileName("testname@stream1")); + Assert.assertEquals("testname",RequestUtils.getFileName("testname@stream1@xxx")); + Assert.assertNull(RequestUtils.getFileName(null)); + } + + @Test + public void testGetStreamName() { + Assert.assertEquals(null,RequestUtils.getStreamName("testname")); + Assert.assertEquals("stream1",RequestUtils.getStreamName("testname@stream1")); + Assert.assertEquals("stream1",RequestUtils.getStreamName("testname@stream1@xxx")); + Assert.assertNull(RequestUtils.getStreamName(null)); + Assert.assertNull(RequestUtils.getStreamName("")); + Assert.assertNull(RequestUtils.getStreamName("filedefault")); + Assert.assertEquals("x",RequestUtils.getStreamName("filedefault@x")); + Assert.assertEquals("x",RequestUtils.getStreamName("filedefault@x@sdfsdf")); + Assert.assertEquals("x....",RequestUtils.getStreamName("filedefault@x....@sdfsdf")); + } + + @Test + public void testPropertyName() { + Assert.assertNull(RequestUtils.propertyName(null)); + Assert.assertNull(RequestUtils.propertyName("")); + Assert.assertEquals("propname",RequestUtils.propertyName("propname")); + Assert.assertEquals("propname",RequestUtils.propertyName("propname@x")); + 
Assert.assertEquals("propname",RequestUtils.propertyName("propname[]@x@sdfsdf")); + Assert.assertEquals("propname",RequestUtils.propertyName("propname@x....@sdfsdf")); + } + + @Test + public void testPropertyDelete() { + Assert.assertEquals(ParameterType.REMOVE, ParameterType.typeOfRequestParameter("something@Delete")); + Assert.assertEquals(ParameterType.ADD, ParameterType.typeOfRequestParameter("something@NotDelete")); + Assert.assertEquals(ParameterType.ADD, ParameterType.typeOfRequestParameter("something@")); + Assert.assertEquals(ParameterType.ADD, ParameterType.typeOfRequestParameter("something")); + Assert.assertEquals(ParameterType.SPECIAL, ParameterType.typeOfRequestParameter("something:")); + Assert.assertEquals(ParameterType.OPERATION, ParameterType.typeOfRequestParameter(":something")); + + } + + @Test + public void testToValue() { + Assert.assertEquals("S",RequestUtils.toValue("test", "S")); + Assert.assertEquals("S",RequestUtils.toValue("test", new String[]{"S"})); + Assert.assertArrayEquals(new String[]{"S"},(String[])RequestUtils.toValue("test[]", new String[]{"S"})); + Assert.assertArrayEquals(new String[]{"S","B"},(String[])RequestUtils.toValue("test", new String[]{"S","B"})); + Assert.assertArrayEquals(new String[]{"S","B"},(String[])RequestUtils.toValue("test[]", new String[]{"S","B"})); + Assert.assertEquals("S",RequestUtils.toValue("test@String", "S")); + Assert.assertEquals("S",RequestUtils.toValue("test@String", new String[]{"S"})); + Assert.assertArrayEquals(new String[]{"S"},(String[])RequestUtils.toValue("test[]@String", new String[]{"S"})); + Assert.assertArrayEquals(new String[]{"S","B"},(String[])RequestUtils.toValue("test@String", new String[]{"S","B"})); + Assert.assertArrayEquals(new String[]{"S","B"},(String[])RequestUtils.toValue("test[]@String", new String[]{"S","B"})); + + Assert.assertEquals(1,RequestUtils.toValue("test@Integer", "1")); + Assert.assertEquals(2,RequestUtils.toValue("test@Integer", new String[]{"2"})); + 
Assert.assertArrayEquals(new Integer[]{3},(Integer[])RequestUtils.toValue("test[]@Integer", new String[]{"3"})); + Assert.assertArrayEquals(new Integer[]{4,5},(Integer[])RequestUtils.toValue("test@Integer", new Integer[]{4,5})); + Assert.assertArrayEquals(new Integer[]{7,6},(Integer[])RequestUtils.toValue("test[]@Integer", new String[]{"7","6"})); + + Assert.assertEquals(1L,RequestUtils.toValue("test@Long", "1")); + Assert.assertEquals(2L,RequestUtils.toValue("test@Long", new String[]{"2"})); + Assert.assertArrayEquals(new Long[]{3L},(Long[])RequestUtils.toValue("test[]@Long", new String[]{"3"})); + Assert.assertArrayEquals(new Long[]{4L,5L},(Long[])RequestUtils.toValue("test@Long", new String[]{"4","5"})); + Assert.assertArrayEquals(new Long[]{7L,6L},(Long[])RequestUtils.toValue("test[]@Long", new Long[]{7L,6L})); + + Assert.assertEquals(true,RequestUtils.toValue("test@Boolean", "true")); + Assert.assertEquals(false,RequestUtils.toValue("test@Boolean", new String[]{"F"})); + Assert.assertArrayEquals(new Boolean[]{true},(Boolean[])RequestUtils.toValue("test[]@Boolean", new String[]{"true"})); + Assert.assertArrayEquals(new Boolean[]{true, false},(Boolean[])RequestUtils.toValue("test@Boolean", new Boolean[]{true,false})); + Assert.assertArrayEquals(new Boolean[]{false, true},(Boolean[])RequestUtils.toValue("test[]@Boolean", new String[]{"0","True"})); + + Assert.assertEquals(1.1,RequestUtils.toValue("test@Double", "1.1")); + Assert.assertEquals(2.2,RequestUtils.toValue("test@Double", new String[]{"2.2"})); + Assert.assertArrayEquals(new Double[]{3.3},(Double[])RequestUtils.toValue("test[]@Double", new String[]{"3.3"})); + Assert.assertArrayEquals(new Double[]{4.4, 5.5},(Double[])RequestUtils.toValue("test@Double", new Double[]{4.4,5.5})); + Assert.assertArrayEquals(new Double[]{7.7, 6.6},(Double[])RequestUtils.toValue("test[]@Double", new String[]{"7.7","6.6"})); + + + Calendar c = (Calendar) RequestUtils.toValue("test@Calendar", 0L); + 
Assert.assertEquals(0L,c.getTimeInMillis()); + c = (Calendar) RequestUtils.toValue("test@Calendar", "1997-07-14T17:23:11+01:30"); + Assert.assertEquals("GMT+01:30", c.getTimeZone().getDisplayName()); + Assert.assertEquals(1997, c.get(Calendar.YEAR)); + Assert.assertEquals(6, c.get(Calendar.MONTH)); // 0 - 11 + Assert.assertEquals(14, c.get(Calendar.DATE)); + Assert.assertEquals(17, c.get(Calendar.HOUR_OF_DAY)); + Assert.assertEquals(23, c.get(Calendar.MINUTE)); + Assert.assertEquals(11, c.get(Calendar.SECOND)); + } + +} diff --git a/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ResponseUtilsTest.java b/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ResponseUtilsTest.java new file mode 100644 index 00000000..93150053 --- /dev/null +++ b/extensions/resource/src/test/java/uk/co/tfd/sm/util/http/ResponseUtilsTest.java @@ -0,0 +1,28 @@ +package uk.co.tfd.sm.util.http; + +import javax.ws.rs.core.Response; + +import org.junit.Assert; +import org.junit.Test; + +public class ResponseUtilsTest { + + @Test + public void test() { + testResponse(99, "testMessage"); + testResponse(100, "testMessage"); + testResponse(200, "testMessage"); + testResponse(201, "testMessage"); + testResponse(300, "testMessage"); + testResponse(301, "testMessage"); + testResponse(400, "testMessage"); + testResponse(401, "testMessage"); + testResponse(500, "testMessage"); + } + + private void testResponse(int code, String message) { + Response r = ResponseUtils.getResponse(code, message);// TODO Auto-generated method stub + Assert.assertEquals(code,r.getStatus()); + + } +} diff --git a/extensions/resource/src/test/resources/uk/co/tfd/sm/integration/resource/utf8.testpatterns b/extensions/resource/src/test/resources/uk/co/tfd/sm/integration/resource/utf8.testpatterns new file mode 100644 index 00000000..d5afa58d --- /dev/null +++ b/extensions/resource/src/test/resources/uk/co/tfd/sm/integration/resource/utf8.testpatterns @@ -0,0 +1,1314 @@ + + + + + + + +UTF-8 Sampler + + +

    UTF-8 SAMPLER

    + +  ¥ · £ · € · $ · ¢ · ₡ · ₢ · ₣ · ₤ · ₥ · ₦ · ₧ · ₨ · ₩ · ₪ · ₫ · ₭ · ₮ · ₯ · ₹ + + + +

    +

    +Frank da Cruz
    +The Kermit Project - Columbia University
    +New York City
    +fdc@columbia.edu + +

    +Last update: +Thu Oct 27 12:07:20 2011 +

    +

    +


    +[ PEACE ] +[ Poetry ] +[ I Can Eat Glass ] +[ Pangrams ] +[ HTML Features ] +[ Credits, Tools, Commentary ] +

    + +UTF-8 is an ASCII-preserving encoding method for +Unicode (ISO 10646), the Universal Character Set +(UCS). The UCS encodes most of the world's writing systems in a single +character set, allowing you to mix languages and scripts within a document +without needing any tricks for switching character sets. This web page is +encoded directly in UTF-8. + +

    + +As shown HERE, +Columbia University's Kermit 95 terminal emulation +software can display UTF-8 plain text in Windows 95, 98, ME, NT, XP, Vista, +or Windows 7 when using a monospace Unicode font like Andale Mono WT J or Everson Mono Terminal, or the lesser +populated Courier New, Lucida Console, or Andale Mono. C-Kermit can handle it too, +if you have a Unicode +display. As many languages as are representable in your font can be seen +on the screen at the same time. + +

    + +This, however, is a Web page, which started out as a kind of stress test for +UTF-8 support in Web browsers, which was spotty when this page was first +created but which has become standard in all modern browsers. The problem +now is mainly the fonts and the browser's (or font's) support for the +nonzero Unicode planes (as in, e.g., the Braille and Gothic examples below). +And to some extent the rendition of combining sequences, right-to-left +rendition (Arabic, Hebrew), and so on. CLICK HERE for a +survey of Unicode fonts for Windows. + +

    + +The subtitle above shows currency symbols of many lands. If they don't +appear as blobs, we're off to a good start! (The one on the end is the +new Indian Rupee +sign which won't show up in fonts for a while.) + +


    +

    Poetry

    + +From the Anglo-Saxon Rune Poem (Rune version): +

    + ᚠᛇᚻ᛫ᛒᛦᚦ᛫ᚠᚱᚩᚠᚢᚱ᛫ᚠᛁᚱᚪ᛫ᚷᛖᚻᚹᛦᛚᚳᚢᛗ
    + ᛋᚳᛖᚪᛚ᛫ᚦᛖᚪᚻ᛫ᛗᚪᚾᚾᚪ᛫ᚷᛖᚻᚹᛦᛚᚳ᛫ᛗᛁᚳᛚᚢᚾ᛫ᚻᛦᛏ᛫ᛞᚫᛚᚪᚾ
    + ᚷᛁᚠ᛫ᚻᛖ᛫ᚹᛁᛚᛖ᛫ᚠᚩᚱ᛫ᛞᚱᛁᚻᛏᚾᛖ᛫ᛞᚩᛗᛖᛋ᛫ᚻᛚᛇᛏᚪᚾ᛬
    +
    +

    + +From Laȝamon's Brut +(The Chronicles of England, Middle English, West Midlands): +

    +

    +An preost wes on leoden, Laȝamon was ihoten
    +He wes Leovenaðes sone -- liðe him be Drihten.
    +He wonede at Ernleȝe at æðelen are chirechen,
    +Uppen Sevarne staþe, sel þar him þuhte,
    +Onfest Radestone, þer he bock radde. +
    +

    + +(The third letter in the author's name is Yogh, missing from many fonts; +CLICK HERE for another Middle English sample +with some explanation of letters and encoding). + +

    + +From the Tagelied of + + +Wolfram von Eschenbach (Middle High German): +

    +Sîne klâwen durh die wolken sint geslagen,
    +er stîget ûf mit grôzer kraft,
    +ich sih in grâwen tägelîch als er wil tagen,
    +den tac, der im geselleschaft
    +erwenden wil, dem werden man,
    +den ich mit sorgen în verliez.
    +ich bringe in hinnen, ob ich kan.
    +sîn vil manegiu tugent michz leisten hiez.
    +

    + +Some lines of + +Odysseus Elytis (Greek): + +

    + + +
    +Monotonic: +

    +Τη γλώσσα μου έδωσαν ελληνική
    +το σπίτι φτωχικό στις αμμουδιές του Ομήρου.
    +Μονάχη έγνοια η γλώσσα μου στις αμμουδιές του Ομήρου.
    +

    +από το Άξιον Εστί
    +του Οδυσσέα Ελύτη + +

    +Polytonic: +

    +Τὴ γλῶσσα μοῦ ἔδωσαν ἑλληνικὴ
    +τὸ σπίτι φτωχικὸ στὶς ἀμμουδιὲς τοῦ Ὁμήρου.
    +Μονάχη ἔγνοια ἡ γλῶσσα μου στὶς ἀμμουδιὲς τοῦ Ὁμήρου.
    +

    +ἀπὸ τὸ Ἄξιον ἐστί
    +τοῦ Ὀδυσσέα Ἐλύτη
    + + + + + + + +

    +
    + +

    + +The first stanza of +Pushkin's Bronze Horseman (Russian):
    +

    +На берегу пустынных волн
    +Стоял он, дум великих полн,
    +И вдаль глядел. Пред ним широко
    +Река неслася; бедный чёлн
    +По ней стремился одиноко.
    +По мшистым, топким берегам
    +Чернели избы здесь и там,
    +Приют убогого чухонца;
    +И лес, неведомый лучам
    +В тумане спрятанного солнца,
    +Кругом шумел.
    +

    + +Šota Rustaveli's Veṗxis Ṭq̇aosani, +̣︡Th, The Knight in the Tiger's Skin (Georgian):

    +

    +ვეპხის ტყაოსანი +შოთა რუსთაველი +

    +ღმერთსი შემვედრე, ნუთუ კვლა დამხსნას სოფლისა შრომასა, +ცეცხლს, წყალსა და მიწასა, ჰაერთა თანა მრომასა; +მომცნეს ფრთენი და აღვფრინდე, მივჰხვდე მას ჩემსა ნდომასა, +დღისით და ღამით ვჰხედვიდე მზისა ელვათა კრთომაასა. +

    +

    + +Tamil poetry of Subramaniya Bharathiyar: + +சுப்ரமணிய பாரதியார் (1882-1921): + +

    +

    + +யாமறிந்த மொழிகளிலே தமிழ்மொழி போல் இனிதாவது எங்கும் காணோம்,
    +பாமரராய் விலங்குகளாய், உலகனைத்தும் இகழ்ச்சிசொலப் பான்மை கெட்டு,
    +நாமமது தமிழரெனக் கொண்டு இங்கு வாழ்ந்திடுதல் நன்றோ? சொல்லீர்!
    +தேமதுரத் தமிழோசை உலகமெலாம் பரவும்வகை செய்தல் வேண்டும். + +
    +

    +Kannada poetry by Kuvempu — ಬಾ ಇಲ್ಲಿ ಸಂಭವಿಸು + +

    +

    + + +ಬಾ ಇಲ್ಲಿ ಸಂಭವಿಸು ಇಂದೆನ್ನ ಹೃದಯದಲಿ +
    + +ನಿತ್ಯವೂ ಅವತರಿಪ ಸತ್ಯಾವತಾರ + +

    + + + + +ಮಣ್ಣಾಗಿ ಮರವಾಗಿ ಮಿಗವಾಗಿ ಕಗವಾಗೀ... + +
    + +ಮಣ್ಣಾಗಿ ಮರವಾಗಿ ಮಿಗವಾಗಿ ಕಗವಾಗಿ + +
    + +ಭವ ಭವದಿ ಭತಿಸಿಹೇ ಭವತಿ ದೂರ + +
    + +ನಿತ್ಯವೂ ಅವತರಿಪ ಸತ್ಯಾವತಾರ || ಬಾ ಇಲ್ಲಿ || + + +

    + + +
    +

    I Can Eat Glass

    + +And from the sublime to the ridiculous, here is a +certain phrase¹ in an assortment of languages: + +

    +

      +
    1. Sanskrit: काचं शक्नोम्यत्तुम् । नोपहिनस्ति माम् ॥ + +
    2. Sanskrit (standard transcription): kācaṃ śaknomyattum; nopahinasti mām. +
    3. Classical Greek: ὕαλον ϕαγεῖν δύναμαι· τοῦτο οὔ με βλάπτει. +
    4. Greek (monotonic): Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα. +
    5. Greek (polytonic): Μπορῶ νὰ φάω σπασμένα γυαλιὰ χωρὶς νὰ πάθω τίποτα. + +
      Etruscan: (NEEDED) +
    6. Latin: Vitrum edere possum; mihi non nocet. +
    7. Old French: Je puis mangier del voirre. Ne me nuit. +
    8. French: Je peux manger du verre, ça ne me fait pas mal. +
    9. Provençal / Occitan: Pòdi manjar de veire, me nafrariá pas. +
    10. Québécois: J'peux manger d'la vitre, ça m'fa pas mal. +
    11. Walloon: Dji pou magnî do vêre, çoula m' freut nén må. +
      Champenois: (NEEDED) +
      Lorrain: (NEEDED) +
    12. Picard: Ch'peux mingi du verre, cha m'foé mie n'ma. +
      Corsican/Corsu: (NEEDED) +
      Jèrriais: (NEEDED) +
    13. Kreyòl Ayisyen (Haitï): Mwen kap manje vè, li pa blese'm. +
    14. Basque: Kristala jan dezaket, ez dit minik ematen. +
    15. Catalan / Català: Puc menjar vidre, que no em fa mal. +
    16. Spanish: Puedo comer vidrio, no me hace daño. +
    17. Aragonés: Puedo minchar beire, no me'n fa mal . +
      Aranés: (NEEDED) +
      Mallorquín: (NEEDED) +
    18. Galician: Eu podo xantar cristais e non cortarme. +
    19. European Portuguese: Posso comer vidro, não me faz mal. +
    20. Brazilian Portuguese (8): + Posso comer vidro, não me machuca. +
    21. Caboverdiano/Kabuverdianu (Cape Verde): M' podê cumê vidru, ca ta maguâ-m'. +
    22. Papiamentu: Ami por kome glas anto e no ta hasimi daño. +
    23. Italian: Posso mangiare il vetro e non mi fa male. +
    24. Milanese: Sôn bôn de magnà el véder, el me fa minga mal. +
    25. Roman: Me posso magna' er vetro, e nun me fa male. +
    26. Napoletano: M' pozz magna' o'vetr, e nun m' fa mal. +
    27. Venetian: Mi posso magnare el vetro, no'l me fa mae. +
    28. Zeneise (Genovese): Pòsso mangiâ o veddro e o no me fà mâ. +
    29. Sicilian: Puotsu mangiari u vitru, nun mi fa mali. +
      Campinadese (Sardinia): (NEEDED) +
      Lugudorese (Sardinia): (NEEDED) +
    30. Romansch (Grischun): Jau sai mangiar vaider, senza che quai fa donn a mai. +
      Romany / Tsigane: (NEEDED) +
    31. Romanian: Pot să mănânc sticlă și ea nu mă rănește. +
    32. Esperanto: Mi povas manĝi vitron, ĝi ne damaĝas min. +
      Pictish: (NEEDED) +
      Breton: (NEEDED) +
    33. Cornish: Mý a yl dybry gwéder hag éf ny wra ow ankenya. +
    34. Welsh: Dw i'n gallu bwyta gwydr, 'dyw e ddim yn gwneud dolur i mi. +
    35. Manx Gaelic: Foddym gee glonney agh cha jean eh gortaghey mee. +
    36. Old Irish (Ogham): ᚛᚛ᚉᚑᚅᚔᚉᚉᚔᚋ ᚔᚈᚔ ᚍᚂᚐᚅᚑ ᚅᚔᚋᚌᚓᚅᚐ᚜ +
    37. Old Irish (Latin): Con·iccim ithi nglano. Ním·géna. + +
    38. Irish: Is féidir liom gloinne a ithe. Ní dhéanann sí dochar ar bith dom. +
    39. Ulster Gaelic: Ithim-sa gloine agus ní miste damh é. +
    40. Scottish Gaelic: S urrainn dhomh gloinne ithe; cha ghoirtich i mi. +
    41. Anglo-Saxon (Runes): +ᛁᚳ᛫ᛗᚨᚷ᛫ᚷᛚᚨᛋ᛫ᛖᚩᛏᚪᚾ᛫ᚩᚾᛞ᛫ᚻᛁᛏ᛫ᚾᛖ᛫ᚻᛖᚪᚱᛗᛁᚪᚧ᛫ᛗᛖ᛬ +
    42. Anglo-Saxon (Latin): Ic mæg glæs eotan ond hit ne hearmiað me. +
    43. Middle English: Ich canne glas eten and hit hirtiþ me nouȝt. +
    44. English: I can eat glass and it doesn't hurt me. +
    45. English (IPA): [aɪ kæn iːt glɑːs ænd ɪt dɐz nɒt hɜːt miː] (Received Pronunciation) +
    46. English (Braille): ⠊⠀⠉⠁⠝⠀⠑⠁⠞⠀⠛⠇⠁⠎⠎⠀⠁⠝⠙⠀⠊⠞⠀⠙⠕⠑⠎⠝⠞⠀⠓⠥⠗⠞⠀⠍⠑ +
    47. Jamaican: Mi kian niam glas han i neba hot mi. +
    48. Lalland Scots / Doric: Ah can eat gless, it disnae hurt us. +
      Glaswegian: (NEEDED) +
    49. Gothic (4): +𐌼𐌰𐌲 +𐌲𐌻𐌴𐍃 +𐌹̈𐍄𐌰𐌽, +𐌽𐌹 +𐌼𐌹𐍃 +𐍅𐌿 +𐌽𐌳𐌰𐌽 +𐌱𐍂𐌹𐌲𐌲𐌹𐌸. +
    50. Old Norse (Runes): ᛖᚴ ᚷᛖᛏ ᛖᛏᛁ +ᚧ ᚷᛚᛖᚱ ᛘᚾ +ᚦᛖᛋᛋ ᚨᚧ ᚡᛖ +ᚱᚧᚨ ᛋᚨᚱ + +
    51. Old Norse (Latin): Ek get etið gler án þess að verða sár. + +
    52. Norsk / Norwegian (Nynorsk): Eg kan eta glas utan å skada meg. +
    53. Norsk / Norwegian (Bokmål): Jeg kan spise glass uten å skade meg. +
    54. Føroyskt / Faroese: Eg kann eta glas, skaðaleysur. + +
    55. Íslenska / Icelandic: Ég get etið gler án þess að meiða mig. +
    56. Svenska / Swedish: Jag kan äta glas utan att skada mig. +
    57. Dansk / Danish: Jeg kan spise glas, det gør ikke ondt på mig. +
    58. Sønderjysk: Æ ka æe glass uhen at det go mæ naue. +
    59. Frysk / Frisian: Ik kin glês ite, it docht me net sear. + + + +
    60. Nederlands / Dutch: Ik kan glas eten, het doet +mij +geen kwaad. + + +
    61. Kirchröadsj/Bôchesserplat: Iech ken glaas èèse, mer 't deet miech +jing pieng.
    62. + +
    63. Afrikaans: Ek kan glas eet, maar dit doen my nie skade nie. +
    64. Lëtzebuergescht / Luxemburgish: Ech kan Glas iessen, daat deet mir nët wei. +
    65. Deutsch / German: Ich kann Glas essen, ohne mir zu schaden. +
    66. Ruhrdeutsch: Ich kann Glas verkasematuckeln, ohne dattet mich wat jucken tut. +
    67. Langenfelder Platt: +Isch kann Jlaas kimmeln, uuhne datt mich datt weh dääd. +
    68. Lausitzer Mundart ("Lusatian"): Ich koann Gloos assn und doas +dudd merr ni wii. +
    69. Odenwälderisch: Iech konn glaasch voschbachteln ohne dass es mir ebbs daun doun dud. +
    70. Sächsisch / Saxon: 'sch kann Glos essn, ohne dass'sch mer wehtue. +
    71. Pfälzisch: Isch konn Glass fresse ohne dasses mer ebbes ausmache dud. +
    72. Schwäbisch / Swabian: I kå Glas frässa, ond des macht mr nix! +
    73. Deutsch (Voralberg): I ka glas eassa, ohne dass mar weh tuat. +
    74. Bayrisch / Bavarian: I koh Glos esa, und es duard ma ned wei. +
    75. Allemannisch: I kaun Gloos essen, es tuat ma ned weh. + +
    76. Schwyzerdütsch (Zürich): Ich chan Glaas ässe, das schadt mir nöd. +
    77. Schwyzerdütsch (Luzern): Ech cha Glâs ässe, das schadt mer ned. + +
      Plautdietsch: (NEEDED) +
    78. Hungarian: Meg tudom enni az üveget, nem lesz tőle bajom. +
    79. Suomi / Finnish: Voin syödä lasia, se ei vahingoita minua. +
    80. Sami (Northern): Sáhtán borrat lása, dat ii leat bávččas. +
    81. Erzian: Мон ярсан +суликадо, ды +зыян +эйстэнзэ а +ули. +
    82. Northern Karelian: Mie voin syvvä lasie ta minla ei ole kipie. +
    83. Southern Karelian: Minä voin syvvä st'oklua dai minule ei ole kibie. +
      Vepsian: (NEEDED) +
      Votian: (NEEDED) +
      Livonian: (NEEDED) +
    84. Estonian: Ma võin klaasi süüa, see ei tee mulle midagi. +
    85. Latvian: Es varu ēst stiklu, tas man nekaitē. +
    86. Lithuanian: Aš galiu valgyti stiklą ir jis manęs nežeidžia +
      Old Prussian: (NEEDED) +
      Sorbian (Wendish): (NEEDED) +
    87. Czech: Mohu jíst sklo, neublíží mi. +
    88. Slovak: Môžem jesť sklo. Nezraní ma. +
    89. Polska / Polish: Mogę jeść szkło i mi nie szkodzi. +
    90. Slovenian: Lahko jem steklo, ne da bi mi škodovalo. +
    91. Croatian: Ja mogu jesti staklo i ne boli me. + +
    92. Serbian (Latin): Ja mogu da jedem staklo. +
    93. Serbian (Cyrillic): Ја могу да једем стакло. +
    94. Macedonian: Можам да јадам стакло, а не ме штета. +
    95. Russian: Я могу есть стекло, оно мне не вредит. +
    96. Belarusian (Cyrillic): Я магу есці шкло, яно мне не шкодзіць. +
    97. Belarusian (Lacinka): Ja mahu jeści škło, jano mne ne škodzić. + +
    98. Ukrainian: Я можу їсти скло, і воно мені не зашкодить. + + +
    99. Bulgarian: Мога да ям стъкло, то не ми вреди. + +
    100. Georgian: მინას ვჭამ და არა მტკივა. +
    101. Armenian: Կրնամ ապակի ուտել և ինծի անհանգիստ չըներ։ +
    102. Albanian: Unë mund të ha qelq dhe nuk më gjen gjë. +
    103. Turkish: Cam yiyebilirim, bana zararı dokunmaz. +
    104. Turkish (Ottoman): جام ييه بلورم بڭا ضررى طوقونمز +
    105. Bangla / Bengali: +আমি কাঁচ খেতে পারি, তাতে আমার কোনো ক্ষতি হয় না। +
    106. Marathi: मी काच खाऊ शकतो, मला ते दुखत नाही. + + + +
    107. Kannada: + + +ನನಗೆ ಹಾನಿ ಆಗದೆ, ನಾನು ಗಜನ್ನು ತಿನಬಹುದು + + + + +
    108. Hindi: मैं काँच खा सकता हूँ और मुझे उससे कोई चोट नहीं पहुंचती. + + +
    109. Tamil: நான் கண்ணாடி சாப்பிடுவேன், அதனால் எனக்கு ஒரு கேடும் வராது. + + +
    110. Telugu: నేను గాజు తినగలను మరియు అలా చేసినా నాకు ఏమి ఇబ్బంది లేదు + + +
    111. Sinhalese: මට වීදුරු කෑමට හැකියි. එයින් මට කිසි හානියක් සිදු නොවේ. + +
    112. Urdu(3): + میں کانچ کھا سکتا ہوں اور مجھے تکلیف نہیں ہوتی ۔ +
    113. Pashto(3): زه شيشه خوړلې شم، هغه ما نه خوږوي +
    114. Farsi / Persian(3): .من می توانم بدونِ احساس درد شيشه بخورم +
    115. Arabic(3): أنا قادر على أكل الزجاج و هذا لا يؤلمني. + +
      Aramaic: (NEEDED) +
    116. Maltese: Nista' niekol il-ħġieġ u ma jagħmilli xejn. +
    117. Hebrew(3): אני יכול לאכול זכוכית וזה לא מזיק לי. +
    118. Yiddish(3): איך קען עסן גלאָז און עס טוט מיר נישט װײ. +
      Judeo-Arabic: (NEEDED) +
      Ladino: (NEEDED) +
      Gǝʼǝz: (NEEDED) +
      Amharic: (NEEDED) +
    119. Twi: Metumi awe tumpan, ɜnyɜ me hwee. +
    120. Hausa (Latin): Inā iya taunar gilāshi kuma in gamā lāfiyā. +
    121. Hausa (Ajami) (2): +إِنا إِىَ تَونَر غِلَاشِ كُمَ إِن غَمَا لَافِىَا +
    122. Yoruba(4): Mo lè je̩ dígí, kò ní pa mí lára. +
    123. Lingala: Nakokí kolíya biténi bya milungi, ekosála ngáí mabé tɛ́. + + +
    124. (Ki)Swahili: Naweza kula bilauri na sikunyui. + +
    125. Malay: Saya boleh makan kaca dan ia tidak mencederakan saya. +
    126. Tagalog: Kaya kong kumain nang bubog at hindi ako masaktan. +
    127. Chamorro: Siña yo' chumocho krestat, ti ha na'lalamen yo'. +
    128. Fijian: Au rawa ni kana iloilo, ia au sega ni vakacacani kina. +
    129. Javanese: Aku isa mangan beling tanpa lara. +
    130. Burmese: +က္ယ္ဝန္‌တော္‌၊က္ယ္ဝန္‌မ မ္ယက္‌စားနုိင္‌သည္‌။ ၎က္ရောင္‌့ +ထိခုိက္‌မ္ဟု မရ္ဟိပာ။ +(9) + +
    131. Vietnamese (quốc ngữ): Tôi có thể ăn thủy tinh mà không hại gì. +
    132. Vietnamese (nôm) (4): 些 𣎏 世 咹 水 晶 𦓡 空 𣎏 害 咦 +
    133. Khmer: +ខ្ញុំអាចញុំកញ្ចក់បាន +ដោយគ្មានបញ្ហារ + + +
    134. Lao: +ຂອ້ຍກິນແກ້ວໄດ້ໂດຍທີ່ມັນບໍ່ໄດ້ເຮັດໃຫ້ຂອ້ຍເຈັບ. + + + +
    135. Thai: ฉันกินกระจกได้ แต่มันไม่ทำให้ฉันเจ็บ +
    136. Mongolian (Cyrillic): Би шил идэй чадна, надад хортой биш +
    137. Mongolian (Classic) (5): + ᠪᠢ ᠰᠢᠯᠢ ᠢᠳᠡᠶᠦ ᠴᠢᠳᠠᠨᠠ ᠂ ᠨᠠᠳᠤᠷ ᠬᠣᠤᠷᠠᠳᠠᠢ ᠪᠢᠰᠢ +
      Dzongkha: (NEEDED) +
    138. Nepali: म काँच खान सक्छू र मलाई केहि नी हुन्‍न् । + +
    139. Tibetan: ཤེལ་སྒོ་ཟ་ནས་ང་ན་གི་མ་རེད། +
    140. Chinese: 我能吞下玻璃而不伤身体。 +
    141. Chinese (Traditional): 我能吞下玻璃而不傷身體。 + +
    142. Taiwanese(6): Góa ē-tàng chia̍h po-lê, mā bē tio̍h-siong. +
    143. Japanese: 私はガラスを食べられます。それは私を傷つけません。 +
    144. Korean: 나는 유리를 먹을 수 있어요. 그래도 아프지 않아요 +
    145. Bislama: Mi save kakae glas, hemi no save katem mi.
      +
    146. Hawaiian: Hiki iaʻu ke ʻai i ke aniani; ʻaʻole nō lā au e ʻeha.
      +
    147. Marquesan: E koʻana e kai i te karahi, mea ʻā, ʻaʻe hauhau. +
    148. Inuktitut (10): ᐊᓕᒍᖅ ᓂᕆᔭᕌᖓᒃᑯ ᓱᕋᙱᑦᑐᓐᓇᖅᑐᖓ + +
    149. Chinook Jargon: Naika məkmək kakshət labutay, pi weyk ukuk munk-sik nay. +
    150. Navajo: Tsésǫʼ yishą́ągo bííníshghah dóó doo shił neezgai da. +
      Cherokee (and Cree, Chickasaw, Cree, Micmac, Ojibwa, Lakota, +Náhuatl, Quechua, Aymara, +and other American languages): (NEEDED) +
      Garifuna: (NEEDED) +
      Gullah: (NEEDED) +
    151. Lojban: mi kakne le nu citka le blaci .iku'i le se go'i na xrani mi +
    152. Nórdicg: Ljœr ye caudran créneþ ý jor cẃran. +
    +

    + +(Additions, corrections, completions, +gratefuly accepted.) + +

    +For testing purposes, some of these are repeated in a monospace font . . . +

    +

      +
    1. Euro Symbol: €. +
    2. Greek: Μπορώ να φάω σπασμένα γυαλιά χωρίς να πάθω τίποτα. + +
    3. Íslenska / Icelandic: Ég get etið gler án þess að meiða mig. + +
    4. Polish: Mogę jeść szkło, i mi nie szkodzi. +
    5. Romanian: Pot să mănânc sticlă și ea nu mă rănește. +
    6. Ukrainian: Я можу їсти шкло, й воно мені не пошкодить. +
    7. Armenian: Կրնամ ապակի ուտել և ինծի անհանգիստ չըներ։ +
    8. Georgian: მინას ვჭამ და არა მტკივა. +
    9. Hindi: मैं काँच खा सकता हूँ, मुझे उस से कोई पीडा नहीं होती. +
    10. Hebrew(2): אני יכול לאכול זכוכית וזה לא מזיק לי. +
    11. Yiddish(2): איך קען עסן גלאָז און עס טוט מיר נישט װײ. +
    12. Arabic(2): أنا قادر على أكل الزجاج و هذا لا يؤلمني. +
    13. Japanese: 私はガラスを食べられます。それは私を傷つけません。 +
    14. Thai: ฉันกินกระจกได้ แต่มันไม่ทำให้ฉันเจ็บ +
    +

    + +Notes: + +

    +

      + +
    1. The "I can eat glass" phrase and initial translations (about 30 of them) +were borrowed from Ethan Mollick's I Can Eat Glass page +(which disappeared on or about June 2004) and converted to UTF-8. Since +Ethan's original page is gone, I should mention that his purpose was to offer +travelers a phrase they could use in any country that would command a +certain kind of respect, or at least get attention. See Credits for the many additional contributions since +then. When submitting new entries, the word "hurt" (if you have a choice) +is used in the sense of "cause harm", "do damage", or "bother", rather than +"inflict pain" or "make sad". In this vein Otto Stolz comments (as do +others further down; personally I think it's better for the purpose of this +page to have extra entries and/or to show a greater repertoire of characters +than it is to enforce a strict interpretation of the word "hurt"!): + +

      +

      + + +This is the meaning I have translated to the Swabian dialect. + +However, I just have noticed that most of the German variants +translate the "inflict pain" meaning. The German example should +read: + +

      +

      +"Ich kann Glas essen ohne mir zu schaden." +
      +

      + +rather than: + +

      +

      +"Ich kann Glas essen, ohne mir weh zu tun." +
      +

      + +(The comma fell victim to the 1996 orthographic reform, +cf. http://www.ids-mannheim.de/reform/e3-1.html#P76. + +

      + +You may wish to contact the contributors of the following translations +to correct them: + +

      +

        + +
      • Lëtzebuergescht / Luxemburgish: Ech kan Glas iessen, daat deet mir nët wei. +
      • Lausitzer Mundart ("Lusatian"): Ich koann Gloos assn und doas dudd merr ni wii. +
      • Sächsisch / Saxon: 'sch kann Glos essn, ohne dass'sch mer wehtue. +
      • Bayrisch / Bavarian: I koh Glos esa, und es duard ma ned wei. +
      • Allemannisch: I kaun Gloos essen, es tuat ma ned weh. +
      • Schwyzerdütsch: Ich chan Glaas ässe, das tuet mir nöd weeh. +
      +

      + +In contrast, I deem the following translations *alright*: + +

      +

        + +
      • Ruhrdeutsch: Ich kann Glas verkasematuckeln, ohne dattet mich wat jucken tut. +
      • Pfälzisch: Isch konn Glass fresse ohne dasses mer ebbes ausmache dud. +
      • Schwäbisch / Swabian: I kå Glas frässa, ond des macht mr nix! +
      +

      + +(However, you could remove the commas, on account of +http://www.ids-mannheim.de/reform/e3-1.html#P76 +and + +http://www.ids-mannheim.de/reform/e3-1.html#P72, respectively.) + +

      + +I guess, also these examples translate the wrong sense of "hurt", +though I do not know these languages well enough to assert them +definitely: + +

      +

        + +
      • Nederlands / Dutch: Ik kan glas eten; het doet mij geen +pijn. (This one has been changed) +
      • Kirchröadsj/Bôchesserplat: Iech ken glaas èèse, mer 't deet miech jing pieng. + +
      +

      + +In the Romanic languages, the variations on "fa male" (it) are probably +wrong, whilst the variations on "hace daño" (es) and "damaĝas" (Esperanto) are probably correct; "nocet" (la) is definitely right. + +

      + +The northern Germanic variants of "skada" are probably right, as are +the Slavic variants of "škodi/шкоди" (se); however the Slavic variants +of " boli" (hv) are probably wrong, as "bolena" means "pain/ache", IIRC. + + +

      +

      +That was from July 2004. In December 2007, Otto writes again: + +

      +

      + +Hello Frank, + +in days of yore, I had written:
      +> "Ich kann Glas essen ohne mir zu schaden."
      +> (The comma fell victim to the 1996 orthographic reform, +

      +cf. http://www.ids-mannheim.de/reform/e3-1.html#P76. +

      + +The latest revision (2006) of the official German orthography +has revived the comma around infinitive clauses commencing with +ohne, or 5 other conjunctions, or depending from a noun or +from an announcing demonstrative +(http://www.ids-mannheim.de/reform/regeln2006.pdf, §75). +So, it's again: Ich kann Glas essen, ohne mir zu schaden. +

      +Best wishes,
      +     Otto Stolz +
      +

      +

      + +

    2. The numbering of the samples is arbitrary, done only to keep track of how +many there are, and can change any time a new entry is added. The +arrangement is also arbitrary but with some attempt to group related +examples together. Note: All languages not listed are wanted, not just the +ones that say (NEEDED). + +

      + +

    3. Correct right-to-left display of these languages +depends on the capabilities of your browser. The period should +appear on the left. In the monospace Yiddish example, the Yiddish digraphs +should occupy one character cell. + +

      + +

    4. Yoruba: The third word is Latin letter small 'j' followed by +small 'e' with U+0329, Combining Vertical Line Below. This displays +correctly only if your Unicode font includes the U+0329 glyph and your +browser supports combining diacritical marks. The Lingala and Indic examples +also include combining sequences. + +

      + +

    5. Includes Unicode 3.1 (or later) characters beyond Plane 0. + +

      + +

    6. The Classic Mongolian example should be vertical, top-to-bottom and +left-to-right. But such display is almost impossible. Also no font yet +exists which provides the proper ligatures and positional variants for the +characters of this script, which works somewhat like Arabic. + +

      + +

    7. Taiwanese is also known as Holo or Hoklo, and is related to Southern +Min dialects such as Amoy. +Contributed by Henry H. Tan-Tenn, who comments, "The above is +the romanized version, in a script current among Taiwanese Christians since +the mid-19th century. It was invented by British missionaries and saw use in +hundreds of published works, mostly of a religious nature. Most Taiwanese did +not know Chinese characters then, or at least not well enough to read. More +to the point, though, a written standard using Chinese characters has never +developed, so a significant minority of words are represented with different +candidate characters, depending on one's personal preference or etymological +theory. In this sentence, for example, "-tàng", "chia̍h", +"mā" and "bē" are problematic using Chinese characters. +"Góa" (I/me) and "po-lê" (glass) are as written in other Sinitic +languages (e.g. Mandarin, Hakka)." + +

      + +

    8. Wagner Amaral of Pinese & Amaral Associados notes that +the Brazilian Portuguese sentence for +"I can eat glass" should be identical to the Portuguese one, as the word +"machuca" means "inflict pain", or rather "injuries". The words "faz +mal" would more correctly translate as "cause harm". + +

      + +

    9. Burmese: In English the first person pronoun "I" stands for both +genders, male and female. In Burmese (except in the central part of Burma) +kyundaw (က္ယ္ဝန္‌တော္‌) for male and kyanma (က္ယ္ဝန္‌မ) for female. +Using here a fully-compliant Unicode Burmese font -- sadly one and only one +Padauk Graphite font exists -- rendering using graphite engine. +CLICK HERE to test Burmese +characters. + +

      + +

    10. From Louise Hope, 22 November 2010:  +I decided to have a go at an Inuktitut rendering, mainly in hopes of shaming someone who actually knows the language into coming up with something better. +Meanwhile, try this: +

      +ᐊᓕᒍᖅ ᓂᕆᔭᕌᖓᒃᑯ ᓱᕋᙱᑦᑐᓐᓇᖅᑐᖓ +
      +aliguq nirijaraangakku suranngittunnaqtunga +

      +Loosely: I am able not to hurt myself whenever I eat glass. +

      +aliguq >> glass (uninflected because it is the patient of a transitive verb in an ergative language)
      +nirijaraangakku >> "I eat him/her/it" in Frequentative mood (all one verb with inflectional ending, no affixes whatsoever)
      +suranngittunnaqtunga >> suraq (do permanent harm) + nngit (verb-negator) + tunnaq (ability) + tunga (intransitive ending, making the verb passive or reflexive) +

      +See above about someone who knows the language, et cetera. +

      +Script trivia: the syllable ᙱ is a single unicode character +representing the two elements ᓐ (syllable-final n) and ᖏ +(syllable ngi). I think they just did it that way because it looks tidier +than the expected ᓐᖏ. If your operating system didn't come +with Euphemia (all-purpose UCAS font), you can download Pigiarniq. It comes with a jolly little inuksuk ᐀ that the Unicode Consortium is trying to make into a squatter. +

      + + + +

    + +
    +

    The Quick Brown Fox... Pangrams

    + +The "I can eat glass" sentences do not necessarily show off the orthography of +each language to best advantage. In many alphabetic written languages it is +possible to include all (or most) letters (or "special" characters) in +a single (often nonsense) pangram. These were traditionally used in +typewriter instruction; now they are useful for stress-testing computer fonts +and keyboard input methods. Here are a few examples (SEND MORE): + +

    +

      + +
    1. English: The quick brown fox jumps over the lazy dog. +
    2. Jamaican: Chruu, a kwik di kwik brong fox a jomp huova di liezi daag de, yu no siit? +
    3. Irish: "An ḃfuil do ċroí ag bualaḋ ó ḟaitíos an ġrá a ṁeall lena ṗóg éada ó +ṡlí do leasa ṫú?" +"D'ḟuascail Íosa Úrṁac na hÓiġe Beannaiṫe pór Éava agus Áḋaiṁ." +
    4. Dutch: Pa's wijze lynx bezag vroom het fikse aquaduct. +
    5. German: Falsches Üben von Xylophonmusik quält jeden +größeren Zwerg. (1) +
    6. German: Im finſteren Jagdſchloß am offenen Felsquellwaſſer patzte der affig-flatterhafte kauzig-höf‌liche Bäcker über ſeinem verſifften kniffligen C-Xylophon. (2) +
    7. Norwegian: Blåbærsyltetøy ("blueberry jam", includes every +extra letter used in Norwegian). +
    8. Swedish: Flygande bäckasiner söka strax hwila på mjuka tuvor. +
    9. Icelandic: Sævör grét áðan því úlpan var ónýt. +
    10. Finnish: (5) Törkylempijävongahdus (This is a perfect pangram, every letter appears only once. Translating it is an art on its own, but I'll say "rude lover's yelp". :-D) +
    11. Finnish: (5) Albert osti fagotin ja töräytti puhkuvan melodian. (Albert bought a bassoon and hooted an impressive melody.) +
    12. Finnish: (5) On sangen hauskaa, että polkupyörä on maanteiden jokapäiväinen ilmiö. (It's pleasantly amusing, that the bicycle is an everyday sight on the roads.) +
    13. Polish: Pchnąć w tę łódź jeża lub osiem skrzyń fig. +
    14. Czech: Příliš +žluťoučký kůň úpěl +ďábelské kódy. +
    15. Slovak: Starý kôň na hŕbe +kníh žuje tíško povädnuté +ruže, na stĺpe sa ďateľ +učí kvákať novú ódu o +živote. +
    16. Greek (monotonic): ξεσκεπάζω την ψυχοφθόρα βδελυγμία + +
    17. Greek (polytonic): +ξεσκεπάζω τὴν ψυχοφθόρα βδελυγμία + + +
    18. Russian: +Съешь же ещё этих мягких французских булок да выпей чаю. + +
    19. Russian: +В чащах юга жил-был цитрус? Да, но фальшивый экземпляр! ёъ. + +
    20. Bulgarian: Жълтата дюля беше щастлива, че пухът, който цъфна, замръзна като гьон. + +
    21. Sami (Northern): Vuol Ruoŧa geđggiid leat máŋga luosa ja čuovžža. +
    22. Hungarian: Árvíztűrő tükörfúrógép. +
    23. Spanish: El pingüino Wenceslao hizo kilómetros bajo exhaustiva lluvia y frío, añoraba a su querido cachorro. +
    24. Portuguese: O próximo vôo à noite sobre o Atlântico, põe freqüentemente o único médico. (3) +
    25. French: Les naïfs ægithales hâtifs pondant à Noël où il gèle sont sûrs d'être +déçus en voyant leurs drôles d'œufs abîmés. + +
    26. Esperanto: Eĥoŝanĝo +ĉiuĵaŭde. + +
    27. Hebrew: זה כיף סתם לשמוע איך תנצח קרפד עץ טוב בגן. + +
    28. Japanese (Hiragana):
      +いろはにほへど ちりぬるを
      +わがよたれぞ つねならむ
      +うゐのおくやま けふこえて
      +あさきゆめみじ ゑひもせず +(4) +
      + +
    +

    +Notes: +

    +

      + +
    1. Other phrases commonly used in Germany include: "Ein wackerer Bayer +vertilgt ja bequem zwo Pfund Kalbshaxe" and, more recently, "Franz jagt im +komplett verwahrlosten Taxi quer durch Bayern", but both lack umlauts and +esszet. Previously, going for the shortest sentence that has all the +umlauts and special characters, I had +"Grüße aus Bärenhöfe +(und Óechtringen)!" +Acute accents are not used in native German words, so I was surprised to +discover "Óechtringen" in the Deutsche Bundespost +Postleitzahlenbuch: +

      +

      +Click for full-size image (2.8MB) +
      +

      +It's a small village in eastern Lower Saxony. +The "oe" in this case +turns out to be the Lower Saxon "lengthening e" (Dehnungs-e), which makes the +previous vowel long (used in a number of Lower Saxon place names such as Soest +and Itzehoe), not the "e" that indicates umlaut of the preceding vowel. +Many thanks to the Óechtringen-Namenschreibungsuntersuchungskomitee +(Alex Bochannek, Manfred Erren, Asmus Freytag, Christoph Päper, plus +Werner Lemberg who serves as +Óechtringen-Namenschreibungsuntersuchungskomiteerechtschreibungsprüfer) + +for their relentless pursuit of the facts in this case. Conclusion: the +accent almost certainly does not belong on this (or any other native German) +word, but neither can it be dismissed as dirt on the page. To add to the +mystery, it has been reported that other copies of the same edition of the +PLZB do not show the accent! UPDATE (March 2006): David Krings was +intrigued enough by this report to contact the mayor of Ebstorf, of which +Oechtringen is a borough, who responded: + +

      +

      +Sehr geehrter Mr. Krings,
      +wenn Oechtringen irgendwo mit einem Akzent auf dem O geschrieben wurde, +dann kann das nur ein Fehldruck sein. Die offizielle Schreibweise lautet +jedenfalls „Oechtringen“.
      +Mit freundlichen Grüssen
      +Der Samtgemeindebürgermeister
      +i.A. Lothar Jessel + +
      + + +

      +

    2. From Karl Pentzlin (Kochel am See, Bavaria, Germany): +"This German phrase is suited for display by a Fraktur (broken letter) +font. It contains: all common three-letter ligatures: ffi ffl fft and all +two-letter ligatures required by the Duden for Fraktur typesetting: ch ck ff +fi fl ft ll ſch ſi ſſ ſt tz (all in a +manner such they are not part of a three-letter ligature), one example of f-l +where German typesetting rules prohibit ligating (marked by a ZWNJ), and all +German letters a...z, ä,ö,ü,ß, ſ [long s] +(all in a manner such that they are not part of a two-letter Fraktur +ligature)." + +Otto Stolz notes that "'Schloß' is now spelled 'Schloss', in +contrast to 'größer' (example 4) which has kept its +'ß'. Fraktur has been banned from general use, in 1942, and long-s +(ſ) has ceased to be used with Antiqua (Roman) even earlier (the +latest Antiqua-ſ I have seen is from 1913, but then +I am no expert, so there may well be a later instance." Later Otto confirms +the latter theory, "Now I've run across a book “Deutsche +Rechtschreibung” (edited by Lutz Mackensen) from 1954 (my reprint +is from 1956) that has kept the Antiqua-ſ in its dictionary part (but +neither in the preface nor in the appendix)." + +

      + +

    3. Diaeresis is not used in Iberian Portuguese. + +

      + +

    4. From Yurio Miyazawa: "This poetry contains all the sounds in the +Japanese language and used to be the first thing for children to learn in +their Japanese class. The Hiragana version is particularly neat because it +covers every character in the phonetic Hiragana character set." Yurio also +sent the Kanji version: + +

      +

      +色は匂へど 散りぬるを
      +我が世誰ぞ 常ならむ
      +有為の奥山 今日越えて
      +浅き夢見じ 酔ひもせず +
      + +
    5. Finnish pangrams from Mikko Ristilä. + +
    +

    +Accented Cyrillic: +

    + +(This section contributed by Vladimir Marinov.) + +

    + +In Bulgarian it is desirable, customary, or in some cases required to +write accents over vowels. Unfortunately, no computer character sets +contain the full repertoire of accented Cyrillic letters. With Unicode, +however, it is possible to combine any Cyrillic letter with any combining +accent. The appearance of the result depends on the font and the rendering +engine. Here are two examples. + +

    +

      + +
    1. Той видя бялата коса́ по главата и́ и ко́са на рамото и́, и ре́че да и́ +рече́: "Пара́та по́ па́ри от па́рата, не ща пари́!", но си поми́сли: "Хей, +помисли́ си! А́ и́ река, а́ е скочила в тази река, която щеше да тече́, +а не те́че." + +

      + +

    2. По пъ́тя пъту́ват кю́рди и югославя́ни. + +
    + +
    +

    HTML Features

    + +Here is the Russian alphabet (uppercase only) coded in three +different ways, which should look identical: + +

    +

      +
    1. АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ +  (Literal UTF-8) +
    2. АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ +  (Decimal numeric character reference) +
    3. АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ +  (Hexadecimal numeric character reference) +
    + +

    + +In another test, we use HTML language tags to distinguish Bulgarian, Russian, +and Serbian, +which have different italic forms for lowercase +б, г, д, п, and/or т: +

    +

    + + + + +
    Bulgarian:   +[ бгдпт ]   +бгдпт ]   + Мога да ям стъкло и не ме боли. +
    Russian: +[ бгдпт ]   +бгдпт ]   +Я могу есть стекло, это мне не вредит. +
    Serbian: +[ бгдпт ]   +бгдпт ]   + Могу јести стакло +а +да ми +не +шкоди. +
    +
    +

    + +


    +

    Credits, Tools, and Commentary

    + +
    +
    Credits:
    +
    +The "I can eat glass" phrase and the initial collection of translations: +Ethan Mollick. +Transcription / conversion to UTF-8: Frank da Cruz. +Albanian: Sindi Keesan. +Afrikaans: Johan Fourie, Kevin Poalses. +Anglo Saxon: Frank da Cruz. +Arabic: Najib Tounsi. +Armenian: Vaçe Kundakçı. +Belarusian: Alexey Chernyak, Patricia Clausnitzer. +Bengali: Somnath Purkayastha, Deepayan Sarkar. +Bislama: Dan McGarry. +Braille: Frank da Cruz. +Bulgarian: Sindi Keesan, Guentcho Skordev, Vladimir Marinov. +Burmese: "cetanapa". +Cabo Verde Creole: Cláudio Alexandre Duarte. +Catalán: Jordi Bancells. +Chinese: Jack Soo, Wong Pui Lam. +Chinook Jargon: David Robertson. +Cornish: Chris Stephens. +Croatian: Marjan Baće. +Czech: Stanislav Pecha, Radovan Garabík. +Dutch: Peter Gotink. Pim Blokland, Rob Daniel, Rob de Wit. +Erzian: Jack Rueter. +Esperanto: Franko Luin, Radovan Garabík. +Estonian: Meelis Roos. +Faroese: Jón Gaasedal. +Farsi/Persian: Payam Elahi. +Fijian: Paul Cannon. +Finnish: Sampsa Toivanen, Mikko Ristilä. +French: Luc Carissimo, Anne Colin du Terrail, Sean M. Burke, Theo Morelli. +Galician: Laura Probaos. +Georgian: Giorgi Lebanidze. +German: Christoph Päper, Otto Stolz, Karl Pentzlin, David Krings, +Frank da Cruz, Peter Keel (Seegras), Elias Glantschnig. +Gothic: Aurélien Coudurier. +Greek: Ariel Glenn, Constantine Stathopoulos, Siva Nataraja, Christos Georgiou. +Hebrew: Jonathan Rosenne, Tal Barnea. +Hausa: Malami Buba, Tom Gewecke. +Hawaiian: na Hauʻoli Motta, Anela de Rego, Kaliko Trapp. +Hindi: Shirish Kalele, Nitin Dahra. +Hungarian: András Rácz, Mark Holczhammer. +Icelandic: Andrés Magnússon, Sveinn Baldursson. +International Phonetic Alphabet (IPA): Siva Nataraja / Vincent Ramos. +Inuktitut: Louise Hope. +Irish: Michael Everson, Marion Gunn, James Kass, Curtis Clark. +Italian: Thomas De Bellis. +Jamaican: Stephen J. Cherin. +Japanese: Makoto Takahashi, Yurio Miyazawa. +Kannada: Sridhar R N, Alok G. Singh. +Karelian: Aleksandr Semakov. +Khmer: Tola Sann. 
+Kirchröadsj: Roger Stoffers. +Kreyòl: Sean M. Burke. +Korean: Jungshik Shin. +Langenfelder Platt: David Krings. +Lao: Tola Sann. +Lëtzebuergescht: Stefaan Eeckels. +Lingala: Denis Moyogo Jacquerye +(Nkóta ya Kɔ́ngɔ míbalé ) +(Nkóta ya Kɔ́ngɔ míbal). +Lithuanian: Gediminas Grigas. +Lojban: Edward Cherlin. +Lusatian: Ronald Schaffhirt. +Macedonian: Sindi Keesan. +Malay: Zarina Mustapha. +Maltese: Kenneth Joseph Vella. +Manx: Éanna Ó Brádaigh. +Marathi: Shirish Kalele. +Marquesan: Kaliko Trapp. +Middle English: Frank da Cruz. +Milanese: Marco Cimarosti. +Mongolian: Tom Gewecke. +Napoletano: Diego Quintano. +Navajo: Tom Gewecke. +Nórdicg: +Yẃlyan Rott. +Nepali: Ujjwol Lamichhane, Rabi Tripathi. +Norwegian: Herman Ranes, Håvard Kvålen. +Odenwälderisch: Alexander Heß. +Old Irish: Michael Everson. +Old Norse: Andrés Magnússon. +Papiamentu: Bianca and Denise Zanardi. +Pashto: N.R. Liwal. +Pfälzisch: Dr. Johannes Sander. +Picard: Philippe Mennecier. +Polish: Juliusz Chroboczek, Paweł Przeradowski, Wlodzislaw Kostecki. +Portuguese: "Cláudio" Alexandre Duarte, Bianca and Denise +Zanardi, Pedro Palhoto Matos, Wagner Amaral. +Québécois: Laurent Detillieux. +Roman: Pierpaolo Bernardi. +Romanian: Juliusz Chroboczek, Ionel Mugurel. +Romansch: Alexandre Suter. +Ruhrdeutsch: "Timwi". +Russian: Alexey Chernyak, Serge Nesterovitch. +Sami: Anne Colin du Terrail, Luc Carissimo. +Sanskrit: Siva Nataraja / Vincent Ramos. +Sächsisch: André Müller. +Schwäbisch: Otto Stolz. +Scots: Jonathan Riddell. +Serbian: Sindi Keesan, Ranko Narancic, Boris Daljevic, Szilvia Csorba, +O. Dag. +Sinhalese: Abdul-Ahad (ASM). +Slovak: G. Adam Stanislav, Radovan Garabík. +Slovenian: Albert Kolar. +Spanish: Aleida Morel, Laura Probaos. +Swahili: Ronald Schaffhirt. +Swedish: Christian Rose, Bengt Larsson. +Taiwanese: Henry H. Tan-Tenn. +Tagalog: Jim Soliven. +Tamil: Vasee Vaseeharan, Vetrivel P. +Telugu: Arjuna Rao Chavala. +Tibetan: D. Germano, Tom Gewecke. +Thai: Alan Wood's wife. 
+Turkish: Vaçe Kundakçı, Tom Gewecke, Merlign Olnon. +Ukrainian: Michael Zajac, Oleg Podsadny. +Ulster Gaelic: Ciarán Ó Duibhín. +Urdu: Mustafa Ali. +Vietnamese: Dixon Au, +[James] Đỗ Bá Phước +杜 伯 福. +Walloon: Pablo Saratxaga. +Welsh: Geiriadur Prifysgol Cymru (Andrew). +Yiddish: Mark David. +Zeneise: Angelo Pavese. + +

    + +

    Tools Used to Create This Web Page:
    + +
    The UTF8-aware Kermit 95 terminal emulator on +Windows, to a Unix host with the EMACS text editor. Kermit +95 displays UTF-8 and also allows keyboard entry of arbitrary Unicode BMP +characters as 4 hex digits, as shown HERE. Hex codes +for Unicode values can be found in The Unicode +Standard (recommended) and the online code charts. When +submissions arrive by email encoded in some other character set (Latin-1, +Latin-2, KOI, various PC code pages, JEUC, etc), I use the TRANSLATE command +of C-Kermit on the Unix host (where I read my mail) to convert the character set to +UTF-8 (I could also use Kermit 95 for this; it has the same TRANSLATE +command). That's it -- no "Web authoring" tools, no locales, no "smart" +anything. It's just plain text, nothing more. By the way, there's nothing +special about EMACS -- any text editor will do, providing it allows entry of +arbitrary 8-bit bytes as text, including the 0x80-0x9F "C1" range. EMACS 21.1 +actually supports UTF-8; earlier versions don't know about it and display the +octal codes; either way is OK for this purpose. + +

    + +

    Commentary: +
    Date: Wed, 27 Feb 2002 13:21:59 +0100
    +From: "Bruno DEDOMINICIS" <b.dedominicis@cite-sciences.fr>
    +Subject: Je peux manger du verre, cela ne me fait pas mal. + +

    + +I just found out your website and it makes me feel like proposing an +interpretation of the choice of this peculiar phrase. + +

    + +Glass is transparent and can hurt as everyone knows. The relation between +people and civilisations is sometimes effusional and more often rude. The +concept of breaking frontiers through globalization, in a way, is also an +attempt to deny any difference. Isn't "transparency" the flag of modernity? +Nothing should be hidden any more, authority is obsolete, and the new powers +are supposed to reign through loving and smiling and no more through +coercion... + +

    + +Eating glass without pain sounds like a very nice metaphor of this attempt. +That is, frontiers should become glass transparent first, and be denied by +incorporating them. On the reverse, it shows that through globalization, +frontiers undergo a process of displacement, that is, when they are not any +more speakable, they become repressed from the speech and are therefore +incorporated and might become painful symptoms, as for example what happens +when one tries to eat glass. + +

    + +The frontiers that used to separate bodies one from another tend to divide +bodies from within and make them suffer.... The chosen phrase then appears +as a denial of the symptom that might result from the destitution of +traditional frontiers. + +

    +Best,
    +Bruno De Dominicis, Paris, France +

    + +

    +Other Unicode pages onsite: +

    +

    +Unicode samplers and resources offsite: +

    +

    +Unicode fonts: +

    + +

    +[ Kermit 95 ] +[ K95 Screen Shots ] +[ C-Kermit ] +[ Kermit Home ] +[ Display Problems? ] +[ The Unicode Consortium ] +


    +
    +UTF-8 Sampler / The Kermit Project / +Columbia University / +kermit@columbia.edu +
    + + diff --git a/extensions/template/pom.xml b/extensions/template/pom.xml new file mode 100644 index 00000000..07ed4494 --- /dev/null +++ b/extensions/template/pom.xml @@ -0,0 +1,82 @@ + + + 4.0.0 + + org.sakaiproject.nakamura + core-base + 5-SNAPSHOT + ../../pom.xml + + uk.co.tfd.sm.template + bundle + 0.1-SNAPSHOT + Sparse Map :: Template Engine + Provides Templating support. + + + + org.apache.felix + maven-scr-plugin + + + org.apache.felix + maven-bundle-plugin + true + + + sparse-map + uk.co.tfd.sm.api.template + uk.co.tfd.sm.template.* + + !org.apache.log.*, + !com.werken.*, + !org.apache.commons.logging.*, + org.apache.tools.ant.*;resolution:=optional, + !org.jdom.*, + * + true + velocity,oro + + + + + + + + org.slf4j + slf4j-simple + 1.5.10 + + + org.apache.felix + org.apache.felix.scr.annotations + + + junit + junit + 4.4 + jar + compile + + + org.apache.velocity + velocity + 1.7 + jar + compile + + + org.mockito + mockito-all + 1.8.5 + jar + test + + + oro + oro + 2.0.8 + + + diff --git a/extensions/template/src/main/java/uk/co/tfd/sm/api/template/TemplateService.java b/extensions/template/src/main/java/uk/co/tfd/sm/api/template/TemplateService.java new file mode 100644 index 00000000..941f8ce4 --- /dev/null +++ b/extensions/template/src/main/java/uk/co/tfd/sm/api/template/TemplateService.java @@ -0,0 +1,20 @@ +package uk.co.tfd.sm.api.template; + +import java.io.Reader; +import java.io.Writer; +import java.util.Map; + +public interface TemplateService { + + boolean evaluate(Map context, Writer writer, + String logTag, String templateAsString); + + boolean evaluate(Map context, Writer writer, + String logTag, Reader templateReader); + + boolean process(Map context, String encoding, + Writer writer, String templateName); + + boolean checkTemplateExists(String templateName); + +} diff --git a/extensions/template/src/main/java/uk/co/tfd/sm/template/TemplateServiceImpl.java 
package uk.co.tfd.sm.template;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.Writer;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;

import org.apache.commons.collections.ExtendedProperties;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Service;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;

import uk.co.tfd.sm.api.template.TemplateService;

/**
 * Default implementation of {@link TemplateService}, backed by a single shared
 * Velocity engine. The engine is configured from the bundled
 * templateService.config resource, overlaid with any OSGi component properties
 * supplied at activation.
 *
 * NOTE(review): the generic parameters below (Map&lt;String, Object&gt;) were
 * lost in the HTML-mangled diff this file came from; they are the only types
 * under which the body compiles — confirm against the TemplateService
 * interface declaration.
 */
@Component(immediate = true, metatype = true)
@Service(value = TemplateService.class)
public class TemplateServiceImpl implements TemplateService {

    /**
     * The shared velocity engine, which should cache all the templates. (need
     * to sort out how to invalidate).
     */
    private VelocityEngine velocityEngine;

    @SuppressWarnings("unused")
    @Property(value = "templates")
    private static final String PROP_RESOURCE_LOADER_PATH = "file.resource.loader.path";

    /**
     * Activate the component: load the default engine configuration from the
     * classpath, overlay any supplied component properties, attach an
     * slf4j-backed logger, and initialise the Velocity engine.
     *
     * @param properties component configuration; may be null. A "debug"
     *            property, when true, puts the Velocity logger in debug mode.
     * @throws IOException if the bundled default configuration cannot be read.
     */
    @Activate
    public void activate(Map<String, Object> properties) throws IOException {
        Properties p = new Properties();
        InputStream in = this.getClass().getResourceAsStream("templateService.config");
        if (in != null) {
            try {
                p.load(in);
            } finally {
                // release the stream even if load() throws, avoiding a leak.
                in.close();
            }
        }
        // override with any supplied properties.
        if (properties != null) {
            for (Entry<String, Object> e : properties.entrySet()) {
                p.put(e.getKey(), e.getValue());
            }
        }
        velocityEngine = new VelocityEngine(p);
        VelocityLogger vl = new VelocityLogger(this.getClass());
        if (properties != null) {
            vl.setDebugMode(Boolean.parseBoolean(String.valueOf(properties.get("debug"))));
        }
        velocityEngine.setProperty(VelocityEngine.RUNTIME_LOG_LOGSYSTEM, vl);

        ExtendedProperties configuration = new ExtendedProperties();
        velocityEngine.setExtendedProperties(configuration);
        velocityEngine.init();
    }

    /**
     * Evaluate an inline template supplied as a String against the context.
     *
     * @return true if the evaluation succeeded.
     */
    public boolean evaluate(Map<String, Object> context, Writer writer,
            String logTag, String templateAsString) {
        return velocityEngine.evaluate(new VelocityContext(context), writer,
                logTag, templateAsString);
    }

    /**
     * Evaluate an inline template supplied as a Reader against the context.
     *
     * @return true if the evaluation succeeded.
     */
    public boolean evaluate(Map<String, Object> context, Writer writer,
            String logTag, Reader templateReader) {
        return velocityEngine.evaluate(new VelocityContext(context), writer,
                logTag, templateReader);
    }

    /**
     * Merge a named template, resolved through the engine's resource loaders,
     * into the writer.
     *
     * @return true if the merge succeeded.
     */
    public boolean process(Map<String, Object> context, String encoding,
            Writer writer, String templateName) {
        return velocityEngine.mergeTemplate(templateName, encoding,
                new VelocityContext(context), writer);
    }

    /**
     * @return true if the named template can be resolved by the engine.
     */
    public boolean checkTemplateExists(String templateName) {
        return velocityEngine.resourceExists(templateName);
    }
}
The SF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package uk.co.tfd.sm.template; + +import org.apache.velocity.runtime.RuntimeServices; +import org.apache.velocity.runtime.log.LogChute; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A proxy class from velocity to slf4j logging. + */ +public class VelocityLogger implements LogChute { + + private Logger logger; + protected boolean inDebugMode = false; + + /** + * @param class1 + */ + public VelocityLogger(Class toLogClass) { + logger = LoggerFactory.getLogger(toLogClass); + } + + /** + * {@inheritDoc} + * + * @see org.apache.velocity.runtime.log.LogChute#init(org.apache.velocity.runtime.RuntimeServices) + */ + public void init(RuntimeServices arg0) throws Exception { + } + + /** + * {@inheritDoc} + * + * @see org.apache.velocity.runtime.log.LogChute#isLevelEnabled(int) + */ + public boolean isLevelEnabled(int level) { + if (inDebugMode) { + return true; + } + switch (level) { + case LogChute.DEBUG_ID: + return logger.isDebugEnabled(); + case LogChute.ERROR_ID: + return logger.isErrorEnabled(); + case LogChute.INFO_ID: + return logger.isInfoEnabled(); + case LogChute.TRACE_ID: + return logger.isTraceEnabled(); + case LogChute.WARN_ID: + return logger.isWarnEnabled(); + } + return false; + } + + /** + * {@inheritDoc} + * + * @see org.apache.velocity.runtime.log.LogChute#log(int, java.lang.String) + */ + public void log(int level, String msg) { + if (inDebugMode) { 
+ logger.info(msg); + } + switch (level) { + case LogChute.DEBUG_ID: + logger.debug(msg); + break; + case LogChute.ERROR_ID: + logger.error(msg); + break; + case LogChute.INFO_ID: + logger.info(msg); + break; + case LogChute.TRACE_ID: + logger.trace(msg); + break; + case LogChute.WARN_ID: + logger.warn(msg); + break; + } + } + + /** + * {@inheritDoc} + * + * @see org.apache.velocity.runtime.log.LogChute#log(int, java.lang.String, + * java.lang.Throwable) + */ + public void log(int level, String msg, Throwable t) { + if (inDebugMode) { + logger.info(msg, t); + } + switch (level) { + case LogChute.DEBUG_ID: + logger.debug(msg, t); + break; + case LogChute.ERROR_ID: + logger.error(msg, t); + break; + case LogChute.INFO_ID: + logger.info(msg, t); + break; + case LogChute.TRACE_ID: + logger.trace(msg, t); + break; + case LogChute.WARN_ID: + logger.warn(msg, t); + break; + } + } + + public void setDebugMode(boolean debug) { + this.inDebugMode = debug; + } + +} diff --git a/extensions/template/src/main/resources/uk/co/tfd/sm/template/templateService.config b/extensions/template/src/main/resources/uk/co/tfd/sm/template/templateService.config new file mode 100644 index 00000000..0f9bfab8 --- /dev/null +++ b/extensions/template/src/main/resources/uk/co/tfd/sm/template/templateService.config @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# ---------------------------------------------------------------------------- +# R U N T I M E L O G +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# T E M P L A T E E N C O D I N G +# ---------------------------------------------------------------------------- + +input.encoding=UTF-8 +output.encoding=UTF-8 + + +# ---------------------------------------------------------------------------- +# T E M P L A T E L O A D E R S +# ---------------------------------------------------------------------------- +# +# +# ---------------------------------------------------------------------------- + +resource.loader = file, class + +file.resource.loader.description = Velocity File Resource Loader +file.resource.loader.class = org.apache.velocity.runtime.resource.loader.FileResourceLoader +file.resource.loader.path = . 
package uk.co.tfd.sm.template;

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.HashMap;
import java.util.Map;

import org.junit.Assert;
import org.junit.Test;

/**
 * Round-trip tests for {@link TemplateServiceImpl}: template existence checks,
 * merging a classpath template, and evaluating inline templates supplied as
 * both String and Reader.
 */
public class TemplateServiceImplTest {

    @Test
    public void test() throws IOException {
        TemplateServiceImpl ts = new TemplateServiceImpl();
        // Google collections not a dep of this project.
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("debug", true);
        ts.activate(props);
        Assert.assertFalse(ts.checkTemplateExists("does-not-exist"));
        Assert.assertTrue(ts.checkTemplateExists("testtemplate.vm"));

        // merge a template resolved through the classpath resource loader.
        StringWriter writer = new StringWriter();
        Map<String, Object> context = new HashMap<String, Object>();
        context.put("world", "Cruel World");
        ts.process(context, "UTF-8", writer, "testtemplate.vm");
        String op = writer.toString();
        Assert.assertEquals("Hello Cruel World", op);

        // evaluate an inline template supplied as a String.
        writer = new StringWriter();
        ts.evaluate(context, writer, "FromString", "Goodby ${world}");
        op = writer.toString();
        Assert.assertEquals("Goodby Cruel World", op);

        // evaluate an inline template supplied as a Reader.
        writer = new StringWriter();
        StringReader reader = new StringReader("Fairwell ${world}");
        ts.evaluate(context, writer, "FromReader", reader);
        op = writer.toString();
        Assert.assertEquals("Fairwell Cruel World", op);
    }
}
org.sakaiproject.nakamura.core + 1.5.1-SNAPSHOT + + + + + milton-releases + Milton Releases + + true + + + false + + http://www.ettrema.com/maven2/ + + + diff --git a/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/AnonAuthHandler.java b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/AnonAuthHandler.java new file mode 100644 index 00000000..86d36a5d --- /dev/null +++ b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/AnonAuthHandler.java @@ -0,0 +1,47 @@ +package uk.co.tfd.sm.milton; + +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.bradmcevoy.http.Auth; +import com.bradmcevoy.http.Auth.Scheme; +import com.bradmcevoy.http.AuthenticationHandler; +import com.bradmcevoy.http.Request; +import com.bradmcevoy.http.Resource; +import com.bradmcevoy.http.SecurityManager; + +public class AnonAuthHandler implements AuthenticationHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(AnonAuthHandler.class); + private SecurityManager securityManager; + + public AnonAuthHandler(SecurityManager securityManager) { + this.securityManager = securityManager; + } + + public boolean supports(Resource r, Request request) { + Auth auth = request.getAuthorization(); + if (auth == null) { + request.setAuthorization(new Auth(Scheme.NEGOTIATE, User.ANON_USER, null)); + return true; + } + return false; + } + + public Object authenticate(Resource resource, Request request) { + LOGGER.debug("authenticate"); + Object o = securityManager.authenticate(User.ANON_USER, null); + LOGGER.debug("result: " + o); + return o; + } + + public String getChallenge(Resource resource, Request request) { + return null; + } + + public boolean isCompatible(Resource resource) { + return true; + } + +} diff --git a/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/LoggingAuthenticationHander.java b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/LoggingAuthenticationHander.java new file mode 
100644 index 00000000..dd8d0aee --- /dev/null +++ b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/LoggingAuthenticationHander.java @@ -0,0 +1,42 @@ +package uk.co.tfd.sm.milton; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.bradmcevoy.http.Auth; +import com.bradmcevoy.http.AuthenticationHandler; +import com.bradmcevoy.http.Request; +import com.bradmcevoy.http.Resource; +import com.bradmcevoy.http.SecurityManager; + +public class LoggingAuthenticationHander implements AuthenticationHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(LoggingAuthenticationHander.class); + private SecurityManager securityManager; + + public LoggingAuthenticationHander(SecurityManager securityManager) { + this.securityManager = securityManager; + } + + public boolean supports(Resource r, Request request) { + Auth a = request.getAuthorization(); + LOGGER.info("Supports {} {} {} ",new Object[]{r,request,a}); + return false; + } + + public Object authenticate(Resource resource, Request request) { + LOGGER.info("Authenticate {} {} ",new Object[]{resource,request}); + return null; + } + + public String getChallenge(Resource resource, Request request) { + LOGGER.info("Get Callenge {} {} ",new Object[]{resource,request}); + return null; + } + + public boolean isCompatible(Resource resource) { + LOGGER.info("Is COmpatable {} ",new Object[]{resource}); + return false; + } + +} diff --git a/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/MiltonSecurityManager.java b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/MiltonSecurityManager.java new file mode 100644 index 00000000..579e6775 --- /dev/null +++ b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/MiltonSecurityManager.java @@ -0,0 +1,66 @@ +package uk.co.tfd.sm.milton; + +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; 
+import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.bradmcevoy.http.Auth; +import com.bradmcevoy.http.Request; +import com.bradmcevoy.http.Request.Method; +import com.bradmcevoy.http.Resource; +import com.bradmcevoy.http.SecurityManager; +import com.bradmcevoy.http.http11.auth.DigestResponse; + +public class MiltonSecurityManager implements SecurityManager { + + private static final Logger LOGGER = LoggerFactory + .getLogger(MiltonSecurityManager.class); + private String realm; + private Repository reposiotry; + private ThreadedSessionTracker sessionTracker; + + public MiltonSecurityManager(Repository repository, ThreadedSessionTracker sessionTracker, String realm) { + this.reposiotry = repository; + this.realm = realm; + this.sessionTracker = sessionTracker; + } + + public Object authenticate(DigestResponse digestRequest) { + throw new RuntimeException( + "Digest authentication is not supported since it depends on" + + " either storing passwords clear text or storing user:realm:password md5 hashed," + + " both of which represent major security issues on a server."); + } + + public Object authenticate(String user, String password) { + try { + LOGGER.debug("Authenticating {} ", user); + if (user == null || User.ANON_USER.equals(user)) { + return sessionTracker.register(reposiotry.login()); + } + return sessionTracker.register(reposiotry.login(user, password)); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage(), e); + } + return null; + } + + public boolean authorise(Request request, Method method, Auth auth, + Resource resource) { + return true; + } + + public String getRealm(String host) { + return realm; + } + + public boolean isDigestAllowed() { + return false; + } + +} diff --git a/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/SessionTrackerFilter.java 
package uk.co.tfd.sm.milton;

import java.util.Set;

import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Session;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.bradmcevoy.http.Filter;
import com.bradmcevoy.http.FilterChain;
import com.bradmcevoy.http.Request;
import com.bradmcevoy.http.Response;
import com.google.common.collect.Sets;

/**
 * Milton filter that tracks sparse sessions opened while a request is being
 * processed: if the filter chain completes normally every tracked session is
 * committed, and in all cases every session is logged out and the per-thread
 * set is cleared.
 */
public class SessionTrackerFilter implements Filter, ThreadedSessionTracker {

    private static final Logger LOGGER = LoggerFactory
            .getLogger(SessionTrackerFilter.class);

    // NOTE(review): generic parameters restored; the HTML-mangled diff shows
    // "ThreadLocal>", and ThreadLocal<Set<Session>> is the only type under
    // which the surrounding code compiles — confirm against the repository.
    private final ThreadLocal<Set<Session>> tracker = new ThreadLocal<Set<Session>>() {
        @Override
        protected Set<Session> initialValue() {
            return Sets.newHashSet();
        }
    };

    /**
     * Run the rest of the chain, then commit on success; always log out every
     * tracked session afterwards, even when the chain throws.
     */
    public void process(FilterChain chain, Request request, Response response) {
        try {
            chain.process(request, response);
            // only reached when the chain did not throw: persist all changes.
            for (Session s : tracker.get()) {
                LOGGER.debug("Committing {} ", s);
                s.commit();
            }
        } finally {
            Set<Session> sessions = tracker.get();
            for (Session s : sessions) {
                try {
                    LOGGER.debug("Logout {} ", s);
                    s.logout();
                } catch (ClientPoolException e) {
                    // best-effort logout; record and continue with the rest.
                    LOGGER.error(e.getMessage(), e);
                }
            }
            sessions.clear();
        }
    }

    /**
     * Register a session for commit/logout at the end of this thread's request.
     *
     * @return the same session, for call chaining.
     */
    public Session register(Session session) {
        tracker.get().add(session);
        return session;
    }
}
Session register(Session session); + +} diff --git a/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonContentResource.java b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonContentResource.java new file mode 100644 index 00000000..ac9a09d8 --- /dev/null +++ b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonContentResource.java @@ -0,0 +1,716 @@ +package uk.co.tfd.sm.milton.spase; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import javax.xml.namespace.QName; + +import org.apache.commons.lang.StringUtils; +import org.sakaiproject.nakamura.api.lite.ClientPoolException; +import org.sakaiproject.nakamura.api.lite.CommitHandler; +import org.sakaiproject.nakamura.api.lite.Repository; +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permission; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; +import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; +import org.sakaiproject.nakamura.api.lite.authorizable.User; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.sakaiproject.nakamura.api.lite.content.ContentManager; +import org.sakaiproject.nakamura.api.lite.lock.AlreadyLockedException; +import org.sakaiproject.nakamura.api.lite.lock.LockState; +import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.bradmcevoy.common.ContentTypeUtils; +import com.bradmcevoy.http.Auth; +import com.bradmcevoy.http.CollectionResource; 
+import com.bradmcevoy.http.FileItem; +import com.bradmcevoy.http.FileResource; +import com.bradmcevoy.http.FolderResource; +import com.bradmcevoy.http.LockInfo; +import com.bradmcevoy.http.LockInfo.LockDepth; +import com.bradmcevoy.http.LockInfo.LockScope; +import com.bradmcevoy.http.LockInfo.LockType; +import com.bradmcevoy.http.LockResult; +import com.bradmcevoy.http.LockResult.FailureReason; +import com.bradmcevoy.http.LockTimeout; +import com.bradmcevoy.http.LockTimeout.DateAndSeconds; +import com.bradmcevoy.http.LockToken; +import com.bradmcevoy.http.LockableResource; +import com.bradmcevoy.http.LockingCollectionResource; +import com.bradmcevoy.http.Range; +import com.bradmcevoy.http.Request; +import com.bradmcevoy.http.Request.Method; +import com.bradmcevoy.http.Resource; +import com.bradmcevoy.http.exceptions.BadRequestException; +import com.bradmcevoy.http.exceptions.ConflictException; +import com.bradmcevoy.http.exceptions.LockedException; +import com.bradmcevoy.http.exceptions.NotAuthorizedException; +import com.bradmcevoy.http.exceptions.PreConditionFailedException; +import com.bradmcevoy.property.MultiNamespaceCustomPropertyResource; +import com.bradmcevoy.property.PropertySource.PropertyAccessibility; +import com.bradmcevoy.property.PropertySource.PropertyMetaData; +import com.bradmcevoy.property.PropertySource.PropertySetException; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMap.Builder; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; + +public class SparseMiltonContentResource implements FileResource, FolderResource, + MultiNamespaceCustomPropertyResource, LockableResource, + LockingCollectionResource { + + public static class LockHolder { + + private LockInfo lockInfo; + private long timeoutSeconds; + private long expiryTime; + private String asString; + private LockTimeout lockTimeout; + + public 
LockHolder(LockInfo lockInfo, LockTimeout timeout) { + Long timeoutSeconds = timeout.getSeconds(); + if (timeoutSeconds == null) { + timeoutSeconds = 3600L; + } + DateAndSeconds t = timeout.getLockedUntil(3600L, timeoutSeconds); + this.lockInfo = lockInfo; + this.lockTimeout = timeout; + this.timeoutSeconds = timeoutSeconds; + this.expiryTime = t.date.getTime(); + asString = lockInfo.lockedByUser + ":" + lockInfo.depth + ":" + + lockInfo.scope + ":" + lockInfo.type + ":" + expiryTime + + ":" + timeoutSeconds; + + } + + public LockHolder(String lock, boolean adjustTimeout) { + String[] parts = StringUtils.split(lock, ':'); + this.lockInfo = new LockInfo(LockScope.valueOf(parts[2]), + LockType.valueOf(parts[3]), parts[0], + LockDepth.valueOf(parts[1])); + this.expiryTime = Long.parseLong(parts[4]); + if (adjustTimeout) { + timeoutSeconds = ((System.currentTimeMillis() - expiryTime) / 1000); + } else { + timeoutSeconds = Long.parseLong(parts[5]); + } + this.lockTimeout = new LockTimeout(timeoutSeconds); + asString = lockInfo.lockedByUser + ":" + lockInfo.depth + ":" + + lockInfo.scope + ":" + lockInfo.type + ":" + expiryTime + + ":" + timeoutSeconds; + + } + + @Override + public String toString() { + return asString; + } + + public long getTimeoutInSeconds() { + return timeoutSeconds; + } + + public LockInfo getLockInfo() { + return lockInfo; + } + + public LockTimeout getLockTimeout() { + return lockTimeout; + } + + } + + private static final Logger LOGGER = LoggerFactory + .getLogger(SparseMiltonContentResource.class); + private static final Map METHOD_PERMISSIONS = getMethodPermissionMap(); + private static final Set PROPERTY_WRITE_METHODS = ImmutableSet + .of(Method.PROPPATCH); + private static final Set REDIRECT_METHODS = ImmutableSet.of( + Method.ACL, Method.COPY, Method.DELETE, Method.MKCALENDAR, + Method.MKCOL, Method.MOVE, Method.POST, Method.PROPPATCH, + Method.PUT); + private static final Set WRITE_METHODS = ImmutableSet.of( + Method.COPY, Method.ACL, 
Method.DELETE, Method.MKCALENDAR, + Method.MKCOL, Method.MOVE, Method.POST, Method.PROPPATCH, + Method.PUT); + private static final long LONG_MAX_AGE = 3600L * 24L * 45L; + + private String name; + private Repository repository; + private String path; + private Content content; + private Session session; + private boolean authorizedToWriteProperties; + + public SparseMiltonContentResource(String name, String path, Session session, Content content) { + this.name = name; + this.path = path; + this.content = content; + this.session = session; + LOGGER.debug("Created content with content object of {} {} ", this, + this.content); + } + + public SparseMiltonContentResource(String path, Session session, + Content newContent) { + this(StorageClientUtils.getObjectName(path), path, session, newContent); + } + + private static Map getMethodPermissionMap() { + Builder b = ImmutableMap.builder(); + b.put(Method.ACL, + Permissions.CAN_ANYTHING_ACL.combine(Permissions.CAN_READ)); + b.put(Method.CONNECT, Permissions.CAN_READ); + b.put(Method.COPY, Permissions.CAN_READ); // need to be able to write to + // the destination + b.put(Method.DELETE, + Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); + b.put(Method.GET, Permissions.CAN_READ); + b.put(Method.HEAD, Permissions.CAN_READ); + b.put(Method.LOCK, Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); + b.put(Method.MKCALENDAR, + Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); + b.put(Method.MKCOL, Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); + b.put(Method.MOVE, Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); // need + // to + // check + // destination + b.put(Method.OPTIONS, Permissions.CAN_READ); + b.put(Method.POST, Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); // might + // need + // more + // here + b.put(Method.PROPFIND, Permissions.CAN_READ); + b.put(Method.PROPPATCH, + Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); + b.put(Method.PUT, 
Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); + b.put(Method.REPORT, Permissions.CAN_READ); + b.put(Method.TRACE, Permissions.CAN_READ); + b.put(Method.UNLOCK, + Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); + return b.build(); + } + + public void copyTo(CollectionResource toCollection, String name) + throws NotAuthorizedException, BadRequestException, + ConflictException { + try { + String sourcePath = path; + String destPath = StorageClientUtils.newPath( + ((SparseMiltonContentResource) toCollection).getPath(), name); + StorageClientUtils.copyTree(session.getContentManager(), + sourcePath, destPath, true); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + throw new ConflictException(this, e.getMessage()); + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage(), e); + throw new NotAuthorizedException(toCollection); + } catch (IOException e) { + LOGGER.error(e.getMessage(), e); + throw new ConflictException(this, e.getMessage()); + } + } + + public String getUniqueId() { + LOGGER.debug("Getting Unique ID from {} ", content); + return content.getId(); + } + + public String getName() { + LOGGER.debug("Getting name from {} ", content); + return name; + } + + private String getPath() { + return path; + } + + public Object authenticate(String user, String password) { + LOGGER.debug("Authenticating agains the resource "); + try { + if (user == null || User.ANON_USER.equals(user)) { + return repository.login(); + } + return repository.login(user, password); + } catch (ClientPoolException e) { + LOGGER.error(e.getMessage(), e); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage(), e); + } + return null; + } + + public boolean authorise(Request request, Method method, Auth auth) { + Session session = (Session) auth.getTag(); + if (session == null) { + LOGGER.debug("Not Authorized, session == null "); + return false; + } + Permission 
permission = METHOD_PERMISSIONS.get(method); + if (permission == null) { + LOGGER.debug("Not Authorized, permissions == null "); + return false; + } + try { + session.getAccessControlManager().check(Security.ZONE_CONTENT, + path, permission); + LOGGER.debug("Authorized {} ", permission); + authorizedToWriteProperties = PROPERTY_WRITE_METHODS + .contains(method); + return true; + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage(), e); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } + LOGGER.debug("Authorize Failed "); + return false; + } + + public String getRealm() { + LOGGER.debug("Get Realm "); + return null; + } + + public Date getModifiedDate() { + LOGGER.debug("Get Modifled "); + if (content != null) { + if (content.hasProperty(Content.LASTMODIFIED_FIELD)) { + return new Date( + (Long) content.getProperty(Content.LASTMODIFIED_FIELD)); + } else { + return new Date(); + } + } + return new Date(0L); + } + + public String checkRedirect(Request request) { + LOGGER.debug("Check Redirect "); + if (REDIRECT_METHODS.contains(request.getMethod())) { + return path; + } + return null; + } + + public void delete() throws NotAuthorizedException, ConflictException, + BadRequestException { + LOGGER.debug("Delete On {} ", path); + try { + Iterable i = content.listChildPaths(); + if (i.iterator().hasNext()) { + throw new ConflictException(this, + "Cant delete if there are child resources"); + } + session.getContentManager().delete(content.getPath()); + } catch (AccessDeniedException e) { + throw new NotAuthorizedException(this); + } catch (StorageClientException e) { + throw new BadRequestException(this, e.getMessage()); + } + } + + + public void sendContent(OutputStream out, Range range, + Map params, String contentType) throws IOException, + NotAuthorizedException, BadRequestException { + LOGGER.debug("Send Content "); + try { + InputStream in; + try { + in = session.getContentManager().getInputStream(path); + } catch (IOException 
e) { + throw new BadRequestException(this, e.getMessage()); + } + if (in == null) { + return; + } + byte[] buffer = new byte[10240]; + if (range != null) { + try { + in.skip(range.getStart()); + } catch (IOException e) { + throw new BadRequestException(this, e.getMessage()); + } + } + for (;;) { + int nr; + try { + nr = in.read(buffer); + } catch (IOException e) { + throw new BadRequestException(this, e.getMessage()); + } + if (nr == -1) { + break; + } + out.write(buffer, 0, nr); + } + } catch (StorageClientException e) { + throw new BadRequestException(this, e.getMessage()); + } catch (AccessDeniedException e) { + throw new NotAuthorizedException(this); + } + } + + public Long getMaxAgeSeconds(Auth auth) { + LOGGER.debug("Get Max Age for {} ", auth); + Session session = (Session) auth.getTag(); + if (session == null || User.ANON_USER.equals(session.getUserId())) { + return LONG_MAX_AGE; + } + try { + User anonUser = (User) session.getAuthorizableManager() + .findAuthorizable(User.ANON_USER); + if (session.getAccessControlManager().can(anonUser, + Security.ZONE_CONTENT, path, Permissions.CAN_READ)) { + return LONG_MAX_AGE; + } + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage(), e); + } + return null; + } + + public String getContentType(String accepts) { + LOGGER.debug("Get Content type for {} ", content); + if (content == null) { + return null; + } + String contentType = (String) content + .getProperty(Content.MIMETYPE_FIELD); + if (contentType == null) { + return null; + } + return ContentTypeUtils.findAcceptableContentType(contentType, accepts); + } + + public Long getContentLength() { + LOGGER.debug("Get Content length {} ", content); + if (content == null) { + return null; + } + return (Long) content.getProperty(Content.LENGTH_FIELD); + } + + public void moveTo(CollectionResource rDest, String name) + throws ConflictException, NotAuthorizedException, + 
BadRequestException { + try { + String sourcePath = path; + String destPath = StorageClientUtils.newPath( + ((SparseMiltonContentResource) rDest).getPath(), name); + LOGGER.debug( + "====================================== Moving from {} to {} ", + sourcePath, destPath); + session.getContentManager().moveWithChildren(sourcePath, destPath); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + throw new ConflictException(this, e.getMessage()); + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage(), e); + throw new NotAuthorizedException(rDest); + } + } + + public String processForm(Map parameters, + Map files) throws BadRequestException, + NotAuthorizedException, ConflictException { + LOGGER.info("Process form {} {} ", parameters, files); + // TODO Auto-generated method stub + return null; + } + + public Date getCreateDate() { + if (content == null) { + return null; + } + Long created = (Long) content.getProperty(Content.CREATED_FIELD); + if (created != null) { + return new Date(created); + } + return new Date(); + } + + public CollectionResource createCollection(String newName) + throws NotAuthorizedException, ConflictException, + BadRequestException { + LOGGER.debug("Create Collection ", newName); + try { + String newPath = StorageClientUtils.newPath(path, newName); + ContentManager contentManager = session.getContentManager(); + Content newContent = contentManager.get(newPath); + if (newContent != null) { + throw new ConflictException(this, "Collection already exists"); + } + newContent = new Content(newPath, ImmutableMap.of("collection", + (Object) true)); + contentManager.update(newContent); + newContent = contentManager.get(newPath); + return new SparseMiltonContentResource(newPath, session, newContent); + } catch (StorageClientException e) { + throw new BadRequestException(this, e.getMessage()); + } catch (AccessDeniedException e) { + throw new NotAuthorizedException(this); + } + } + + public Resource child(String childName) 
{ + LOGGER.debug("Get Child ", childName); + try { + String newPath = StorageClientUtils.newPath(path, childName); + Content c = session.getContentManager().get(newPath); + if (c != null) { + return new SparseMiltonContentResource(newPath, session, c); + } + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage(), e); + } + return null; + } + + public List getChildren() { + // this needs to be disposed by the system. + LOGGER.debug("Get Children "); + final Iterator children = content.listChildren().iterator(); + return ImmutableList.copyOf(new PreemptiveIterator() { + + private SparseMiltonContentResource resource; + + @Override + protected boolean internalHasNext() { + while (children.hasNext()) { + Content n = children.next(); + if (n != null) { + resource = new SparseMiltonContentResource(n + .getPath(), session, n); + return true; + } + } + resource = null; + return false; + } + + @Override + protected SparseMiltonContentResource internalNext() { + return resource; + } + + }); + } + + public Resource createNew(String newName, InputStream inputStream, + Long length, String contentType) throws IOException, + ConflictException, NotAuthorizedException, BadRequestException { + LOGGER.debug("Create new {} ", newName); + try { + String newPath = StorageClientUtils.newPath(path, newName); + ContentManager contentManager = session.getContentManager(); + Content newContent = contentManager.get(newPath); + if (newContent == null) { + newContent = new Content(newPath, ImmutableMap.of( + Content.MIMETYPE_FIELD, (Object) contentType)); + } else { + newContent.setProperty(Content.MIMETYPE_FIELD, + (Object) contentType); + } + contentManager.update(newContent); + contentManager.writeBody(newPath, inputStream); + newContent = contentManager.get(newPath); + return new SparseMiltonContentResource(newPath, session, newContent); + } catch (StorageClientException e) { + throw new 
BadRequestException(this, e.getMessage()); + } catch (AccessDeniedException e) { + throw new NotAuthorizedException(this); + } + } + + // ========= PROPFIND PROPPATCH Support =============================== + + public Object getProperty(QName name) { + String n = getFullName(name); + Object o = content.getProperty(n); + LOGGER.debug("-------------- GETTING {} as {} --------------", n, o); + return o; + } + + public void setProperty(QName name, Object value) + throws PropertySetException, NotAuthorizedException { + String n = getFullName(name); + if (value == null) { + LOGGER.debug("-------------- REMOVING {} --------------", n); + content.removeProperty(n); + } else { + LOGGER.debug("-------------- SETTING {} as {} --------------", n, + value); + content.setProperty(n, value); + } + session.addCommitHandler(content.getId(), new CommitHandler() { + + public void commit() { + try { + session.getContentManager().update(content); + } catch (AccessDeniedException e) { + LOGGER.error(e.getMessage()); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } + } + }); + } + + public PropertyMetaData getPropertyMetaData(QName name) { + if (authorizedToWriteProperties) { + return new PropertyMetaData(PropertyAccessibility.WRITABLE, + Object.class); + } else if (content.hasProperty(getFullName(name))) { + return new PropertyMetaData(PropertyAccessibility.WRITABLE, + Object.class); + } + return null; + } + + private String getFullName(QName name) { + String nameSpace = name.getNamespaceURI(); + if (nameSpace == null || nameSpace.length() == 0) { + return "{None}" + name.getLocalPart(); + } + return name.toString(); + } + + public List getAllPropertyNames() { + List l = Lists.newArrayList(); + for (Entry p : content.getProperties().entrySet()) { + String name = p.getKey(); + if (name.startsWith("{")) { + int i = name.indexOf('}'); + l.add(new QName(name.substring(1, i - 1), name.substring(i + 1))); + } else { + l.add(new QName(name)); + } + } + return l; + 
} + + // ========= LOCK Support =============================== + + public LockToken createAndLock(String name, LockTimeout timeout, + LockInfo lockInfo) throws NotAuthorizedException { + LOGGER.debug("Create And Lock {} {} ", timeout, lockInfo); + + try { + String newPath = StorageClientUtils.newPath(path, name); + LockHolder lockHolder = new LockHolder(lockInfo, timeout); + String token = session.getLockManager().lock(newPath, + lockHolder.getTimeoutInSeconds(), lockHolder.toString()); + return new LockToken(token, lockInfo, timeout); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + throw new NotAuthorizedException(this); + } catch (AlreadyLockedException e) { + LOGGER.error(e.getMessage(), e); + throw new NotAuthorizedException(this); + } + } + + public LockResult lock(LockTimeout timeout, LockInfo lockInfo) + throws NotAuthorizedException, PreConditionFailedException, + LockedException { + try { + LockHolder lockHolder = new LockHolder(lockInfo, timeout); + String token = session.getLockManager().lock(path, + lockHolder.getTimeoutInSeconds(), lockHolder.toString()); + return LockResult.success(new LockToken(token, lockInfo, timeout)); + } catch (AlreadyLockedException e) { + return LockResult.failed(FailureReason.ALREADY_LOCKED); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + throw new NotAuthorizedException(this); + } + } + + public LockResult refreshLock(String token) throws NotAuthorizedException, + PreConditionFailedException { + try { + LockState lockState = session.getLockManager().getLockState(path, + token); + LOGGER.debug("Refreshing lock with {} gave {} ", token, lockState); + if (lockState.isOwner() && lockState.hasMatchedToken() + && path.equals(lockState.getLockPath())) { + LOGGER.debug("Refreshing lock "); + LockHolder lock = new LockHolder(lockState.getExtra(), false); + session.getLockManager().refreshLock(path, + lock.getTimeoutInSeconds(), lock.toString(), token); + LockInfo lockInfo = 
lock.getLockInfo(); + LockTimeout timeout = lock.getLockTimeout(); + return LockResult.success(new LockToken(token, lockInfo, + timeout)); + } + LOGGER.debug("Not Refreshing Lock"); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + throw new NotAuthorizedException(this); + } + throw new PreConditionFailedException(this); + } + + public void unlock(String tokenId) throws NotAuthorizedException, + PreConditionFailedException { + try { + LockState lockState = session.getLockManager().getLockState(path, + tokenId); + if (lockState.isOwner() && lockState.hasMatchedToken() + && path.equals(lockState.getLockPath())) { + session.getLockManager().unlock(path, tokenId); + return; + } + } catch (StorageClientException e) { + throw new NotAuthorizedException(this); + } + throw new PreConditionFailedException(this); + } + + public LockToken getCurrentLock() { + try { + // get the lock regardless, the handlers should deal with locking. + LockState lockState = session.getLockManager().getLockState( + path, "unknown"); + if (lockState.isLocked()) { + LOGGER.debug(" getCurrentLock() Found Lock {} {} ", path, + lockState); + String extra = lockState.getExtra(); + String token = lockState.getToken(); + LockHolder lockHolder = new LockHolder(extra, true); + LockToken lockToken = new LockToken(token, lockHolder.getLockInfo(), + lockHolder.getLockTimeout()); + return new LockToken(token, lockHolder.getLockInfo(), + lockHolder.getLockTimeout()); + } + LOGGER.debug("No Lock Present at {} ", path); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } + return null; + } + + +} diff --git a/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonContentResourceFactory.java b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonContentResourceFactory.java new file mode 100644 index 00000000..e8f04427 --- /dev/null +++ 
b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonContentResourceFactory.java @@ -0,0 +1,59 @@ +package uk.co.tfd.sm.milton.spase; + +import org.sakaiproject.nakamura.api.lite.Session; +import org.sakaiproject.nakamura.api.lite.StorageClientException; +import org.sakaiproject.nakamura.api.lite.StorageClientUtils; +import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; +import org.sakaiproject.nakamura.api.lite.content.Content; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.bradmcevoy.http.HttpManager; +import com.bradmcevoy.http.Resource; +import com.bradmcevoy.http.ResourceFactory; + +public class SparseMiltonContentResourceFactory implements ResourceFactory { + + private static final Logger LOGGER = LoggerFactory + .getLogger(SparseMiltonContentResourceFactory.class); + private String basePath; + + public SparseMiltonContentResourceFactory(String basePath) { + this.basePath = basePath; + } + + public Resource getResource(String host, String path) { + Session session = (Session) HttpManager.request().getAuthorization() + .getTag(); + LOGGER.debug("Get Resource for [{}] ", path); + if ( path == null ) { + path = "/"; + } else if ( path != null && path.startsWith(basePath) ) { + path = path.substring(basePath.length()); + } + if ( path.length() > 1 && path.endsWith("/")) { + path = path.substring(0,path.length()-1); + } + if ( "".equals(path) ) { + path = "/"; + } + try { + Content content = session.getContentManager().get(path); + if (content != null) { + return new SparseMiltonContentResource(path, session, content); + } + if ("/".equals(path) || "".equals(path) || path == null) { + LOGGER.debug("Root Object [{}] ", path); + return new SparseMiltonContentResource(StorageClientUtils.getObjectName(basePath), path, session, new Content( + "/", null)); + } + LOGGER.debug("Not Found {} ", path); + } catch (StorageClientException e) { + LOGGER.error(e.getMessage(), e); + } catch 
(AccessDeniedException e) { + LOGGER.debug(e.getMessage()); + } + return null; + } + +} diff --git a/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonServlet.java b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonServlet.java new file mode 100644 index 00000000..b010ef07 --- /dev/null +++ b/extensions/webdav/src/main/java/uk/co/tfd/sm/milton/spase/SparseMiltonServlet.java @@ -0,0 +1,107 @@ +package uk.co.tfd.sm.milton.spase; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import javax.servlet.Servlet; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.felix.scr.annotations.Activate; +import org.apache.felix.scr.annotations.Component; +import org.apache.felix.scr.annotations.Deactivate; +import org.apache.felix.scr.annotations.Properties; +import org.apache.felix.scr.annotations.Property; +import org.apache.felix.scr.annotations.Reference; +import org.apache.felix.scr.annotations.Service; +import org.sakaiproject.nakamura.api.lite.Repository; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import uk.co.tfd.sm.milton.MiltonSecurityManager; +import uk.co.tfd.sm.milton.SessionTrackerFilter; + +import com.bradmcevoy.http.AuthenticationHandler; +import com.bradmcevoy.http.AuthenticationService; +import com.bradmcevoy.http.Filter; +import com.bradmcevoy.http.HttpManager; +import com.bradmcevoy.http.Request; +import com.bradmcevoy.http.ResourceFactory; +import com.bradmcevoy.http.Response; +import com.bradmcevoy.http.SecurityManager; +import com.bradmcevoy.http.ServletRequest; +import com.bradmcevoy.http.ServletResponse; +import com.bradmcevoy.http.http11.Http11ResponseHandler; +import com.bradmcevoy.http.http11.auth.PreAuthenticationFilter; +import com.bradmcevoy.http.http11.auth.SecurityManagerBasicAuthHandler; +import 
com.google.common.collect.ImmutableList; + +@Component(immediate = true, metatype = true) +@Service(value = Servlet.class) +@Properties(value = { @Property(name = "alias", value = "/dav") }) +public class SparseMiltonServlet extends HttpServlet { + + /** + * + */ + private static final long serialVersionUID = -7451452381693049651L; + private static final Logger LOGGER = LoggerFactory.getLogger(SparseMiltonServlet.class); + private HttpManager httpManager; + + @Reference + private Repository repository; + + @Activate + public void activate(Map properties) { + String basePath = (String) properties.get("alias"); + if ( basePath == null ) { + basePath = "/dav"; + } + ResourceFactory resourceFactory = new SparseMiltonContentResourceFactory(basePath); + SessionTrackerFilter sessionTrackerFilter = new SessionTrackerFilter(); + SecurityManager securityManager = new MiltonSecurityManager(repository, sessionTrackerFilter, basePath); + List authHandlers = ImmutableList.of( + (AuthenticationHandler) new SecurityManagerBasicAuthHandler(securityManager)); + AuthenticationService authenticationService = new AuthenticationService(authHandlers); + httpManager = new HttpManager(resourceFactory, authenticationService); + Http11ResponseHandler responseHandler = httpManager + .getResponseHandler(); + + Filter authenticationFilter = new PreAuthenticationFilter( + responseHandler, authHandlers); + httpManager.addFilter(0, authenticationFilter); + httpManager.addFilter(1, sessionTrackerFilter); + + } + + @Deactivate + public void deactivate(Map properties) { + } + + @Override + protected void service(HttpServletRequest servletRequest, + HttpServletResponse servletResponse) throws ServletException, + IOException { + String litmusTestId = null; + try { + Request request = new ServletRequest(servletRequest); + Response response = new ServletResponse(servletResponse); + Map headers = request.getHeaders(); + if ( headers.containsKey("X-Litmus") ) { + litmusTestId = headers.get("X-Litmus"); + 
LOGGER.info("+++++++++++++Litmus Test Start {} ",litmusTestId); + } + httpManager.process(request, response); + } finally { + if ( litmusTestId != null ) { + LOGGER.info("-------------Litmus Test End {} ",litmusTestId); + } + servletResponse.getOutputStream().flush(); + servletResponse.flushBuffer(); + } + } + +} diff --git a/pom.xml b/pom.xml index edf87bad..cb9de530 100644 --- a/pom.xml +++ b/pom.xml @@ -1,145 +1,278 @@ - - 4.0.0 - - base - org.sakaiproject.nakamura - 0.10-SNAPSHOT - - org.sakaiproject.nakamura - org.sakaiproject.nakamura.core - bundle - 0.10-SNAPSHOT - Sakai Nakamura :: Cassandra based server bundle. - Server that uses Cassandra. - - - - org.apache.felix - maven-scr-plugin - - - org.apache.felix - maven-bundle-plugin - true - - - sakai-nakamura - - org.sakaiproject.nakamura.api.lite.* - - - !org.jboss.resource.adapter.jdbc.*, - !com.mchange.v2.c3p0.*, - !org.apache.xpath.*, - !org.apache.xalan.*, - !org.apache.xml.utils.*, - !org.apache.derby.impl.drda.*, - !org.apache.commons.cli.*, - !org.json.simple.*, - !com.reardencommerce.*, - !jline, - !org.apache.hadoop.*, - !org.antlr.runtime.*, - !org.cliffc.high_scale_lib.*, - !com.sun.jna.*, - !org.junit.*, - * - - org.sakaiproject.nakamura.lite.* - libthrift,apache-cassandra,mysql-connector-java,derby - - - - - - - - - - - org.apache.cassandra.deps - libthrift - - 0.5.0 - provided - - - org.apache.cassandra.thrift - apache-cassandra - 0.6.5 - provided - - - - commons-pool - commons-pool - 1.5 - - - - commons-lang - commons-lang - 2.5 - - - - commons-io - commons-io - 1.4 - - - - - com.google.collections - google-collections - 0.8 - - - - - org.apache.felix - org.osgi.core - - - org.apache.felix - org.osgi.compendium - - - - - - org.slf4j - slf4j-api - - - org.slf4j - slf4j-simple - - - org.mockito - mockito-all - 1.8.5 - - - - - org.apache.felix - org.apache.felix.scr.annotations - - - org.apache.derby - derby - 10.6.2.1 - test - - - mysql - mysql-connector-java - 5.1.13 - - + + 4.0.0 + + 
org.sonatype.oss + oss-parent + + 7 + + org.sakaiproject.nakamura + core-base + pom + 5-SNAPSHOT + Core Base Pom for Map Content Storage bundle. + Core Base + + UTF-8 + true + + + core + drivers/jdbc + + + scm:git:git://github.com/sakaiproject/sparsemapcontent.git + scm:git:git@github.com:sakaiproject/sparsemapcontent.git + http://github.com/sakaiproject/sparsemapcontent/ + + + + + org.apache.felix + org.apache.felix.scr.annotations + 1.6.0 + + + + + + + org.apache.maven.plugins + maven-release-plugin + 2.2.1 + + deploy + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.0.2 + + 1.5 + 1.5 + true + -Xlint:unchecked,deprecation,fallthrough,finally + true + ${project.build.sourceEncoding} + + + + org.apache.maven.plugins + maven-source-plugin + 2.0.4 + + + attach-sources + package + + jar + + + + + true + true + + + + org.apache.felix + maven-scr-plugin + 1.7.4 + + + org.slf4j + slf4j-simple + 1.5.2 + + + + + generate-scr-scrdescriptor + + scr + + + + + + org.apache.felix + maven-bundle-plugin + 2.3.6 + true + + + sparsemapcontent + ${project.artifactId} + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.5 + + + **/integration/**/*Test.java + + + + + + + + + integration + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.5 + + + **/integration/**/*Test.java + + + None.java + + + + + + + + redeploy + + + + org.apache.sling + maven-sling-plugin + 2.0.6 + + + install-bundle + + install + + + + + + + + + release-sign-artifacts + + + performRelease + true + + + + + + org.apache.maven.plugins + maven-gpg-plugin + 1.1 + + + sign-artifacts + verify + + sign + + + + + + + + + + + + org.codehaus.mojo + findbugs-maven-plugin + 2.3.1 + + Max + Low + + + + org.apache.maven.plugins + maven-jxr-plugin + 2.2 + + + org.apache.maven.plugins + maven-pmd-plugin + 2.5 + + 1.6 + UTF-8 + true + + /rulesets/internal/all-java.xml + + + + + org.codehaus.mojo + cobertura-maven-plugin + 2.4 + + + org.apache.maven.plugins + maven-surefire-report-plugin + 2.6 + + + + 
+ + + + + sakai-site + Sakai release Site + scp://source.sakaiproject.org/var/www/html/release/nakamura/${project.version} + + + + + sakai-maven + Sakai Maven Repo + default + http://source.sakaiproject.org/maven2 + + true + + + false + + + + sakai-maven2-snapshots + Sakai Maven Repo + default + http://source.sakaiproject.org/maven2-snapshots + + false + + + true + + + + http://sakaiproject.org + + + Apache 2 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/RemoveProperty.java b/src/main/java/org/sakaiproject/nakamura/api/lite/RemoveProperty.java deleted file mode 100644 index 6a78174b..00000000 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/RemoveProperty.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.sakaiproject.nakamura.api.lite; - -public class RemoveProperty { - -} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/SessionAdaptable.java b/src/main/java/org/sakaiproject/nakamura/api/lite/SessionAdaptable.java deleted file mode 100644 index 4caabf02..00000000 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/SessionAdaptable.java +++ /dev/null @@ -1,7 +0,0 @@ -package org.sakaiproject.nakamura.api.lite; - -public interface SessionAdaptable { - - Session getSession(); - -} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/StorageCacheManager.java b/src/main/java/org/sakaiproject/nakamura/api/lite/StorageCacheManager.java deleted file mode 100644 index f4d395bd..00000000 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/StorageCacheManager.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.sakaiproject.nakamura.api.lite; - -import java.util.Map; - -/** - * Provides Cache implementations for all the three areas represented as Maps. - * If an implementation of this interface is present it will be used. 
- */ -public interface StorageCacheManager { - - Map getAccessControlCache(); - - Map getAuthorizableCache(); - - Map getContentCache(); - -} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/StoreListener.java b/src/main/java/org/sakaiproject/nakamura/api/lite/StoreListener.java deleted file mode 100644 index 8ded1605..00000000 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/StoreListener.java +++ /dev/null @@ -1,43 +0,0 @@ -package org.sakaiproject.nakamura.api.lite; - - -public interface StoreListener { - public static final String TOPIC_BASE = "org/sakaiproject/nakamura/lite/"; - public static final String DELETE_TOPIC = "DELETE"; - public static final String ADDED_TOPIC = "ADDED"; - public static final String UPDATED_TOPIC = "UPDATED"; - public static final String DEFAULT_DELETE_TOPIC = TOPIC_BASE + DELETE_TOPIC; - public static final String DEFAULT_CREATE_TOPIC = TOPIC_BASE + ADDED_TOPIC; - public static final String DEFAULT_UPDATE_TOPIC = TOPIC_BASE + UPDATED_TOPIC; - public static final String[] DEFAULT_TOPICS = new String[] { DEFAULT_CREATE_TOPIC, - DEFAULT_UPDATE_TOPIC, DEFAULT_DELETE_TOPIC, - TOPIC_BASE + "authorizables/" + DELETE_TOPIC, - TOPIC_BASE + "groups/" + DELETE_TOPIC, - TOPIC_BASE + "users/" + DELETE_TOPIC, - TOPIC_BASE + "admin/" + DELETE_TOPIC, - TOPIC_BASE + "authorizables/" + DELETE_TOPIC, - TOPIC_BASE + "content/" + DELETE_TOPIC, - TOPIC_BASE + "authorizables/"+ADDED_TOPIC, - TOPIC_BASE + "groups/"+ADDED_TOPIC, - TOPIC_BASE + "users/"+ADDED_TOPIC, - TOPIC_BASE + "admin/"+ADDED_TOPIC, - TOPIC_BASE + "authorizables/"+ADDED_TOPIC, - TOPIC_BASE + "content/"+ADDED_TOPIC, - TOPIC_BASE + "authorizables/"+UPDATED_TOPIC, - TOPIC_BASE + "groups/"+UPDATED_TOPIC, - TOPIC_BASE + "users/"+UPDATED_TOPIC, - TOPIC_BASE + "admin/"+UPDATED_TOPIC, - TOPIC_BASE + "authorizables/"+UPDATED_TOPIC, - TOPIC_BASE + "content/"+UPDATED_TOPIC }; - public static final String USERID_PROPERTY = "userid"; - public static final String PATH_PROPERTY = 
"path"; - - void onDelete(String zone, String path, String user, String... attributes); - - void onUpdate(String zone, String path, String user, boolean isNew, String... attributes); - - void onLogin(String id, String string); - - void onLogout(String id, String string); - -} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Authorizable.java b/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Authorizable.java deleted file mode 100644 index 2f47f556..00000000 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/authorizable/Authorizable.java +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.api.lite.authorizable; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; - -import org.apache.commons.lang.StringUtils; -import org.sakaiproject.nakamura.api.lite.RemoveProperty; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.util.Iterables; -import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -/** - * Base Authorizable object. - */ -public class Authorizable { - - public static final String PASSWORD_FIELD = "pwd"; - - public static final String PRINCIPALS_FIELD = "principals"; - - public static final String MEMBERS_FIELD = "members"; - - public static final String ID_FIELD = "id"; - - public static final String NAME_FIELD = "name"; - - public static final String AUTHORIZABLE_TYPE_FIELD = "type"; - - public static final String GROUP_VALUE = "g"; - public static final Object USER_VALUE = "u"; - - public static final String ADMINISTRATORS_GROUP = "administrators"; - - public static final String LASTMODIFIED = "lastModified"; - public static final String LASTMODIFIED_BY = "lastModifiedBy"; - public static final String CREATED = "created"; - public static final String CREATED_BY = "createdBy"; - - private static final Set FILTER_PROPERTIES = ImmutableSet.of(PASSWORD_FIELD, ID_FIELD); - - private static final Set PRIVATE_PROPERTIES = ImmutableSet.of(PASSWORD_FIELD); - - public static final String NO_PASSWORD = "--none--"; - - protected static final Logger LOGGER = 
LoggerFactory.getLogger(Authorizable.class); - - /** - * A read only copy of the map, protected by an Immutable Wrapper - */ - protected ImmutableMap authorizableMap; - protected Set principals; - - protected String id; - - /** - * Modifications to the map. - */ - protected Map modifiedMap; - protected boolean principalsModified; - - private boolean isObjectNew = true; - - protected boolean readOnly; - - public Authorizable(Map autorizableMap) { - principalsModified = false; - modifiedMap = Maps.newHashMap(); - init(autorizableMap); - } - - private void init(Map newMap) { - this.authorizableMap = ImmutableMap.copyOf(newMap); - Object principalsB = authorizableMap.get(PRINCIPALS_FIELD); - if (principalsB == null) { - this.principals = Sets.newLinkedHashSet(); - } else { - this.principals = Sets.newLinkedHashSet(Iterables.of(StringUtils.split( - (String) principalsB, ';'))); - } - this.id = (String) authorizableMap.get(ID_FIELD); - if (!User.ANON_USER.equals(this.id)) { - this.principals.add(Group.EVERYONE); - } - } - - public void reset(Map newMap) { - if ( !readOnly ) { - principalsModified = false; - modifiedMap.clear(); - init(newMap); - - LOGGER.debug("After Update to Authorizable {} ", authorizableMap); - } - } - - - public String[] getPrincipals() { - return principals.toArray(new String[principals.size()]); - } - - public String getId() { - return id; - } - - // TODO: Unit test - public Map getSafeProperties() { - if (!readOnly && principalsModified) { - modifiedMap.put(PRINCIPALS_FIELD, StringUtils.join(principals, ';')); - } - return StorageClientUtils.getFilterMap(authorizableMap, modifiedMap, null, FILTER_PROPERTIES); - } - - public boolean isGroup() { - return false; - } - - - public void setProperty(String key, Object value) { - if (!readOnly && !FILTER_PROPERTIES.contains(key)) { - Object cv = authorizableMap.get(key); - if (!value.equals(cv)) { - modifiedMap.put(key,value); - } else if ( modifiedMap.containsKey(key) && 
!value.equals(modifiedMap.get(key))) { - modifiedMap.put(key, value); - } - - } - } - - public Object getProperty(String key) { - if (!PRIVATE_PROPERTIES.contains(key)) { - if ( modifiedMap.containsKey(key)) { - Object o = modifiedMap.get(key); - if ( o instanceof RemoveProperty ) { - return null; - } else { - return o; - } - } - return authorizableMap.get(key); - } - return null; - } - - public void removeProperty(String key) { - if (!readOnly && authorizableMap.containsKey(key)) { - modifiedMap.put(key, new RemoveProperty()); - } - } - - public void addPrincipal(String principal) { - if (!readOnly && !principals.contains(principal)) { - principals.add(principal); - principalsModified = true; - } - } - - public void removePrincipal(String principal) { - if (!readOnly && principals.contains(principal)) { - principals.remove(principal); - principalsModified = true; - } - } - - public Map getPropertiesForUpdate() { - if (!readOnly && principalsModified) { - principals.remove(Group.EVERYONE); - modifiedMap.put(PRINCIPALS_FIELD, StringUtils.join(principals, ';')); - principals.add(Group.EVERYONE); - } - return StorageClientUtils.getFilterMap(authorizableMap, modifiedMap, null, - FILTER_PROPERTIES); - } - - - - public boolean isModified() { - return !readOnly && (principalsModified || (modifiedMap.size() > 0)); - } - - public boolean hasProperty(String name) { - if ( modifiedMap.get(name) instanceof RemoveProperty ) { - return false; - } - return authorizableMap.containsKey(name); - } - - public Iterator memberOf(final AuthorizableManager authorizableManager) { - final List memberIds = new ArrayList(); - Collections.addAll(memberIds, getPrincipals()); - return new PreemptiveIterator() { - - private int p; - private Group group; - - protected boolean internalHasNext() { - while (p < memberIds.size()) { - String id = memberIds.get(p); - p++; - try { - Authorizable a = authorizableManager.findAuthorizable(id); - if (a instanceof Group) { - group = (Group) a; - for (String 
pid : a.getPrincipals()) { - if (!memberIds.contains(pid)) { - memberIds.add(pid); - } - } - return true; - } - } catch (AccessDeniedException e) { - LOGGER.debug(e.getMessage(), e); - } catch (StorageClientException e) { - LOGGER.debug(e.getMessage(), e); - } - } - return false; - } - - protected Group internalNext() { - return group; - } - - }; - } - - protected void setObjectNew(boolean isObjectNew) { - this.isObjectNew = isObjectNew; - } - - public boolean isNew() { - return isObjectNew; - } - - protected void setReadOnly(boolean readOnly) { - if ( !this.readOnly ) { - this.readOnly = readOnly; - } - } - - - @Override - public int hashCode() { - return id.hashCode(); - } - - - @Override - public boolean equals(Object obj) { - if ( obj instanceof Authorizable ) { - Authorizable a = (Authorizable) obj; - return id.equals(a.getId()); - } - return super.equals(obj); - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/api/lite/util/PreemptiveIterator.java b/src/main/java/org/sakaiproject/nakamura/api/lite/util/PreemptiveIterator.java deleted file mode 100644 index 5187dd18..00000000 --- a/src/main/java/org/sakaiproject/nakamura/api/lite/util/PreemptiveIterator.java +++ /dev/null @@ -1,53 +0,0 @@ -package org.sakaiproject.nakamura.api.lite.util; - -import org.sakaiproject.nakamura.lite.storage.DisposableIterator; - -import java.util.Iterator; -import java.util.NoSuchElementException; - -/** - * A Iterator wrapper that pre-emptively checks the next value in the underlying iterator before responding true to hasNext(). 
- * @param - */ -public abstract class PreemptiveIterator implements Iterator, DisposableIterator { - - private static final int UNDETERMINED = 0; - private static final int TRUE = 1; - private static final int FALSE = -1; - private int lastCheck = UNDETERMINED; - - protected abstract boolean internalHasNext(); - - protected abstract T internalNext(); - - public final boolean hasNext() { - if (lastCheck == FALSE) { - return false; - } - if (lastCheck != UNDETERMINED) { - return (lastCheck == TRUE); - } - if (internalHasNext()) { - lastCheck = TRUE; - return true; - } - lastCheck = FALSE; - return false; - } - - public final T next() { - if (!hasNext()) { - throw new NoSuchElementException(); - } - lastCheck = UNDETERMINED; - return internalNext(); - } - - public final void remove() { - throw new UnsupportedOperationException(); - } - - public void close() { - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/CachingManager.java b/src/main/java/org/sakaiproject/nakamura/lite/CachingManager.java deleted file mode 100644 index 5edb1bf0..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/CachingManager.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.lite; - -import org.sakaiproject.nakamura.api.lite.CacheHolder; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Map; - -public abstract class CachingManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(CachingManager.class); - private Map sharedCache; - private StorageClient client; - private int hit; - private int miss; - private long calls; - - public CachingManager(StorageClient client, Map sharedCache) { - this.client = client; - this.sharedCache = sharedCache; - } - - protected Map getCached(String keySpace, String columnFamily, String key) - throws StorageClientException { - Map m = null; - String cacheKey = getCacheKey(keySpace, columnFamily, key); - - - if (sharedCache != null && sharedCache.containsKey(cacheKey)) { - CacheHolder aclCacheHolder = sharedCache.get(cacheKey); - if (aclCacheHolder != null) { - m = aclCacheHolder.get(); - hit++; - } - } - if (m == null) { - m = client.get(keySpace, columnFamily, key); - miss++; - if (sharedCache != null) { - if (m != null) { - LOGGER.debug("Found Map {} {}", cacheKey, m); - } - sharedCache.put(cacheKey, new CacheHolder(m)); - } - } - calls++; - if ((calls % 1000) == 0) { - getLogger().info("Cache Stats Hits {} Misses {} hit% {}", new Object[] { hit, miss, - ((100 * hit) / (hit + miss)) }); - } - return m; - } - - protected abstract Logger getLogger(); - - private String getCacheKey(String keySpace, String columnFamily, String key) { - return keySpace + ":" + columnFamily + ":" + key; - } - - protected void removeFromCache(String keySpace, String columnFamily, String key) { - if (sharedCache != null) { - sharedCache.remove(getCacheKey(keySpace, columnFamily, key)); - } - } - - - protected void putCached(String keySpace, String columnFamily, String key, - Map encodedProperties, boolean 
probablyNew) - throws StorageClientException { - removeFromCache(keySpace, columnFamily, key); - client.insert(keySpace, columnFamily, key, encodedProperties, probablyNew); - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/ConfigurationImpl.java b/src/main/java/org/sakaiproject/nakamura/lite/ConfigurationImpl.java deleted file mode 100644 index da2550e2..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/ConfigurationImpl.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.lite; - -import org.apache.felix.scr.annotations.Activate; -import org.apache.felix.scr.annotations.Component; -import org.apache.felix.scr.annotations.Property; -import org.apache.felix.scr.annotations.Service; -import org.sakaiproject.nakamura.api.lite.Configuration; - -import java.util.Map; - -@Component(immediate = true, metatype = true) -@Service(value = Configuration.class) -public class ConfigurationImpl implements Configuration { - - @Property(value = "ac") - private static final String ACL_COLUMN_FAMILY = "acl-column-family"; - @Property(value = "n") - private static final String KEYSPACE = "keyspace"; - @Property(value = "au") - private static final String AUTHORIZABLE_COLUMN_FAMILY = "authorizable-column-family"; - @Property(value = "cn") - private static final String CONTENT_COLUMN_FAMILY = "content-column-family"; - - private String aclColumnFamily; - private String keySpace; - private String authorizableColumnFamily; - private String contentColumnFamily; - - @Activate - public void activate(Map properties) { - aclColumnFamily = (String) properties.get(ACL_COLUMN_FAMILY); - keySpace = (String) properties.get(KEYSPACE); - authorizableColumnFamily = (String) properties.get(AUTHORIZABLE_COLUMN_FAMILY); - contentColumnFamily = (String) properties.get(CONTENT_COLUMN_FAMILY); - } - - public String getAclColumnFamily() { - return aclColumnFamily; - } - - public String getKeySpace() { - return keySpace; - } - - public String getAuthorizableColumnFamily() { - return authorizableColumnFamily; - } - - public String getContentColumnFamily() { - return contentColumnFamily; - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/LoggingStorageListener.java b/src/main/java/org/sakaiproject/nakamura/lite/LoggingStorageListener.java deleted file mode 100644 index d4f0075d..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/LoggingStorageListener.java +++ /dev/null @@ -1,32 +0,0 @@ -package 
org.sakaiproject.nakamura.lite; - -import org.sakaiproject.nakamura.api.lite.StoreListener; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; - - -public class LoggingStorageListener implements StoreListener { - - - private static final Logger LOGGER = LoggerFactory.getLogger(LoggingStorageListener.class); - - public void onDelete(String zone, String path, String user, String... attributes) { - LOGGER.info("Delete {} {} {} {} ", new Object[] {zone,path,user,Arrays.toString(attributes)}); - } - - public void onUpdate(String zone, String path, String user, boolean isNew, String... attributes) { - LOGGER.info("Update {} {} {} new:{} {} ", new Object[] {zone,path,user,isNew,Arrays.toString(attributes)}); - } - - public void onLogin(String userId, String sessionId) { - LOGGER.info("Login {} {}", new Object[] {userId, sessionId}); - } - - public void onLogout(String userId, String sessionId) { - LOGGER.info("Logout {} {}", new Object[] {userId, sessionId}); - } - - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlManagerImpl.java b/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlManagerImpl.java deleted file mode 100644 index c8c56dde..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/accesscontrol/AccessControlManagerImpl.java +++ /dev/null @@ -1,411 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.sakaiproject.nakamura.lite.accesscontrol; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - -import org.sakaiproject.nakamura.api.lite.CacheHolder; -import org.sakaiproject.nakamura.api.lite.Configuration; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.api.lite.StoreListener; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessControlManager; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Permission; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; -import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; -import org.sakaiproject.nakamura.api.lite.authorizable.Group; -import org.sakaiproject.nakamura.api.lite.authorizable.User; -import org.sakaiproject.nakamura.lite.CachingManager; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.ConcurrentHashMap; - -public class AccessControlManagerImpl extends CachingManager implements AccessControlManager { - - private static final Logger LOGGER = 
LoggerFactory.getLogger(AccessControlManagerImpl.class); - private User user; - private String keySpace; - private String aclColumnFamily; - private Map cache = new ConcurrentHashMap(); - private boolean closed; - private StoreListener storeListener; - - public AccessControlManagerImpl(StorageClient client, User currentUser, Configuration config, - Map sharedCache, StoreListener storeListener) { - super(client, sharedCache); - this.user = currentUser; - this.aclColumnFamily = config.getAclColumnFamily(); - this.keySpace = config.getKeySpace(); - closed = false; - this.storeListener = storeListener; - } - - public Map getAcl(String objectType, String objectPath) - throws StorageClientException, AccessDeniedException { - checkOpen(); - check(objectType, objectPath, Permissions.CAN_READ_ACL); - - String key = this.getAclKey(objectType, objectPath); - return ImmutableMap.copyOf(getCached(keySpace, aclColumnFamily, key)); - } - - public Map getEffectiveAcl(String objectType, String objectPath) - throws StorageClientException, AccessDeniedException { - throw new UnsupportedOperationException("Nag someone to implement this"); - } - - public void setAcl(String objectType, String objectPath, AclModification[] aclModifications) - throws StorageClientException, AccessDeniedException { - checkOpen(); - check(objectType, objectPath, Permissions.CAN_WRITE_ACL); - String key = this.getAclKey(objectType, objectPath); - Map currentAcl = getAcl(objectType, objectPath); - Map modifications = Maps.newLinkedHashMap(); - for (AclModification m : aclModifications) { - String name = m.getAceKey(); - if (m.isRemove()) { - modifications.put(name, null); - } else { - - int bitmap = toInt(currentAcl.get(name)); - bitmap = m.modify(bitmap); - modifications.put(name, bitmap); - } - } - LOGGER.debug("Updating ACL {} {} ", key, modifications); - putCached(keySpace, aclColumnFamily, key, modifications, (currentAcl == null || currentAcl.size() == 0)); - storeListener.onUpdate(objectType, 
objectPath, getCurrentUserId(), false, "op:acl"); - } - - public void check(String objectType, String objectPath, Permission permission) - throws AccessDeniedException, StorageClientException { - if (user.isAdmin()) { - return; - } - // users can always operate on their own user object. - if (Security.ZONE_AUTHORIZABLES.equals(objectType) && user.getId().equals(objectPath)) { - return; - } - int[] privileges = compilePermission(user, objectType, objectPath, 0); - if (!((permission.getPermission() & privileges[0]) == permission.getPermission())) { - throw new AccessDeniedException(objectType, objectPath, permission.getName(), - user.getId()); - } - } - - private String getAclKey(String objectType, String objectPath) { - if (objectPath.startsWith("/")) { - return objectType + objectPath; - } - return objectType + "/" + objectPath; - } - - private int[] compilePermission(Authorizable authorizable, String objectType, - String objectPath, int recursion) throws StorageClientException { - String key = getAclKey(objectType, objectPath); - if (user.getId().equals(authorizable.getId()) && cache.containsKey(key)) { - return cache.get(key); - } else { - LOGGER.debug("Cache Miss {} [{}] ", cache, key); - } - - Map acl = getCached(keySpace, aclColumnFamily, key); - LOGGER.debug("ACL on {} is {} ", key, acl); - - int grants = 0; - int denies = 0; - if (acl != null) { - - { - String principal = authorizable.getId(); - int tg = toInt(acl.get(principal - + AclModification.GRANTED_MARKER)); - int td = toInt(acl - .get(principal + AclModification.DENIED_MARKER)); - grants = grants | tg; - denies = denies | td; - // LOGGER.info("Added Permissions for {} {} result {} {}",new - // Object[]{tg,td,grants,denies}); - - } - for (String principal : authorizable.getPrincipals()) { - int tg = toInt(acl.get(principal - + AclModification.GRANTED_MARKER)); - int td = toInt(acl - .get(principal + AclModification.DENIED_MARKER)); - grants = grants | tg; - denies = denies | td; - // 
LOGGER.info("Added Permissions for {} {} result {} {}",new - // Object[]{tg,td,grants,denies}); - } - if (!User.ANON_USER.equals(authorizable.getId())) { - // all users except anon are in the group everyone, by default - // but only if not already denied or granted by a more specific - // permission. - int tg = (toInt(acl.get(Group.EVERYONE - + AclModification.GRANTED_MARKER)) & ~denies); - int td = (toInt(acl.get(Group.EVERYONE - + AclModification.DENIED_MARKER)) & ~grants); - // LOGGER.info("Adding Permissions for Everyone {} {} ",tg,td); - grants = grants | tg; - denies = denies | td; - - } - /* - * grants contains the granted permissions in a bitmap denies - * contains the denied permissions in a bitmap - */ - int granted = grants; - int denied = denies; - - /* - * Only look to parent objects if this is not the root object and - * everything is not granted and denied - */ - if (recursion < 20 && !StorageClientUtils.isRoot(objectPath) - && (granted != 0xffff || denied != 0xffff)) { - recursion++; - int[] parentPriv = compilePermission(authorizable, objectType, - StorageClientUtils.getParentObjectPath(objectPath), recursion); - if (parentPriv != null) { - /* - * Grant permission not denied at this level parentPriv[0] - * is permissions granted by the parent ~denies is - * permissions not denied here parentPriv[0] & ~denies is - * permissions granted by the parent that have not been - * denied here. we need to add those to things granted here. 
- * ie | - */ - granted = grants | (parentPriv[0] & ~denies); - /* - * Deny permissions not granted at this level - */ - denied = denies | (parentPriv[1] & ~grants); - } - } - // If not denied all users and groups can read other users and - // groups and all content can be read - if (((denied & Permissions.CAN_READ.getPermission()) == 0) - && (Security.ZONE_AUTHORIZABLES.equals(objectType) || Security.ZONE_CONTENT - .equals(objectType))) { - granted = granted | Permissions.CAN_READ.getPermission(); - // LOGGER.info("Default Read Permission set {} {} ",key,denied); - } else { - // LOGGER.info("Default Read has been denied {} {} ",key, - // denied); - } - // LOGGER.info("Permissions on {} for {} is {} {} ",new - // Object[]{key,user.getId(),granted,denied}); - /* - * Keep a cached copy - */ - if (user.getId().equals(authorizable.getId())) { - cache.put(key, new int[] { granted, denied }); - } - return new int[] { granted, denied }; - - } - if (Security.ZONE_AUTHORIZABLES.equals(objectType) - || Security.ZONE_CONTENT.equals(objectType)) { - // unless explicitly denied all users can read other users. - return new int[] { Permissions.CAN_READ.getPermission(), 0 }; - } - return new int[] { 0, 0 }; - } - - private int toInt(Object object) { - if ( object instanceof Integer ) { - return ((Integer) object).intValue(); - } - return 0; - } - - public String getCurrentUserId() { - return user.getId(); - } - - public void close() { - closed = true; - } - - private void checkOpen() throws StorageClientException { - if (closed) { - throw new StorageClientException("Access Control Manager is closed"); - } - } - - public boolean can(Authorizable authorizable, String objectType, String objectPath, - Permission permission) { - if (authorizable instanceof User && ((User) authorizable).isAdmin()) { - return true; - } - // users can always operate on their own user object. 
- if (Security.ZONE_AUTHORIZABLES.equals(objectType) - && authorizable.getId().equals(objectPath)) { - return true; - } - try { - int[] privileges = compilePermission(authorizable, objectType, objectPath, 0); - if (!((permission.getPermission() & privileges[0]) == permission.getPermission())) { - return false; - } - } catch (StorageClientException e) { - LOGGER.warn(e.getMessage(), e); - return false; - } - return true; - } - - public Permission[] getPermissions(String objectType, String path) throws StorageClientException { - int[] perms = compilePermission(this.user, objectType, path, 0); - List permissions = Lists.newArrayList(); - for (Permission p : Permissions.PRIMARY_PERMISSIONS) { - if ((perms[0] & p.getPermission()) == p.getPermission()) { - permissions.add(p); - } - } - return permissions.toArray(new Permission[permissions.size()]); - } - - public String[] findPrincipals(String objectType, String objectPath, int permission, boolean granted) throws StorageClientException { - Map principalMap = internalCompilePrincipals(objectType, objectPath, 0); - LOGGER.debug("Got Principals {} ",principalMap); - List principals = Lists.newArrayList(); - for (Entry perm : principalMap.entrySet()) { - int[] p = perm.getValue(); - if ( granted && (p[0] & permission) == permission ) { - principals.add(perm.getKey()); - LOGGER.debug("Included {} {} {} ",new Object[]{perm.getKey(), perm.getValue(), permission}); - } else if ( !granted && (p[1] & permission) == permission) { - principals.add(perm.getKey()); - LOGGER.debug("Included {} {} {} ",new Object[]{perm.getKey(), perm.getValue(), permission}); - } else { - LOGGER.debug("Filtered {} {} {} ",new Object[]{perm.getKey(), perm.getValue(), permission}); - } - } - LOGGER.debug(" Found Principals {} ",principals); - return principals.toArray(new String[principals.size()]); - } - - - - private Map internalCompilePrincipals(String objectType, String objectPath, int recursion) throws StorageClientException { - Map 
compiledPermissions = Maps.newHashMap(); - String key = getAclKey(objectType, objectPath); - - Map acl = getCached(keySpace, aclColumnFamily, key); - - if (acl != null) { - LOGGER.debug("Checking {} {} ",key,acl); - for (Entry ace : acl.entrySet()) { - String aceKey = ace.getKey(); - String principal = aceKey.substring(0, aceKey.length() - 2); - - if (!compiledPermissions.containsKey(principal)) { - int tg = toInt(acl.get(principal - + AclModification.GRANTED_MARKER)); - int td = toInt(acl.get(principal - + AclModification.DENIED_MARKER)); - compiledPermissions.put(principal, new int[] { tg, td }); - LOGGER.debug("added {} ",principal); - } - - } - } - /* - * grants contains the granted permissions in a bitmap denies contains - * the denied permissions in a bitmap - */ - - /* - * Only look to parent objects if this is not the root object and - * everything is not granted and denied - */ - if (recursion < 20 && !StorageClientUtils.isRoot(objectPath)) { - recursion++; - Map parentPermissions = internalCompilePrincipals(objectType, - StorageClientUtils.getParentObjectPath(objectPath), recursion); - // add the parernt privileges in - for (Entry parentPermission : parentPermissions.entrySet()) { - int[] thisPriv = new int[2]; - String principal = parentPermission.getKey(); - if (compiledPermissions.containsKey(principal)) { - thisPriv = compiledPermissions.get(principal); - LOGGER.debug("modified {} ",principal); - } else { - LOGGER.debug("creating {} ",principal); - } - int[] parentPriv = parentPermission.getValue(); - - /* - * Grant permission not denied at this level parentPriv[0] is - * permissions granted by the parent ~denies is permissions not - * denied here parentPriv[0] & ~denies is permissions granted by - * the parent that have not been denied here. we need to add - * those to things granted here. 
ie | - */ - int granted = thisPriv[0] | (parentPriv[0] & ~thisPriv[1]); - /* - * Deny permissions not granted at this level - */ - int denied = thisPriv[1] | (parentPriv[1] & ~thisPriv[0]); - - compiledPermissions.put(principal, new int[] { granted, denied }); - - } - } - - // - // If not denied all users and groups can read other users and - // groups and all content can be read - for (String principal : new String[] { Group.EVERYONE, User.ANON_USER }) { - int[] perm = new int[2]; - if (compiledPermissions.containsKey(principal)) { - perm = compiledPermissions.get(principal); - } - if (((perm[1] & Permissions.CAN_READ.getPermission()) == 0) - && (Security.ZONE_AUTHORIZABLES.equals(objectType) || Security.ZONE_CONTENT - .equals(objectType))) { - perm[0] = perm[0] | Permissions.CAN_READ.getPermission(); - LOGGER.debug("added Default {} ",principal); - compiledPermissions.put(principal, perm); - } - } - compiledPermissions.put(User.ADMIN_USER, new int[] { 0xffff, 0x0000}); - return compiledPermissions; - // only store those permissions the match the requested set.] 
- - - } - - @Override - protected Logger getLogger() { - return LOGGER; - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/authorizable/GroupInternal.java b/src/main/java/org/sakaiproject/nakamura/lite/authorizable/GroupInternal.java deleted file mode 100644 index c13e3d7c..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/authorizable/GroupInternal.java +++ /dev/null @@ -1,23 +0,0 @@ -package org.sakaiproject.nakamura.lite.authorizable; - -import com.google.common.collect.ImmutableMap; - -import org.sakaiproject.nakamura.api.lite.authorizable.Group; - -import java.util.Map; - -public class GroupInternal extends Group { - - public GroupInternal(Map groupMap, boolean objectIsNew) { - super(groupMap); - setObjectNew(objectIsNew); - } - - public GroupInternal(ImmutableMap groupMap, boolean objectIsNew, boolean readOnly) { - super(groupMap); - setObjectNew(objectIsNew); - setReadOnly(readOnly); - } - - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/authorizable/UserInternal.java b/src/main/java/org/sakaiproject/nakamura/lite/authorizable/UserInternal.java deleted file mode 100644 index 306ff552..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/authorizable/UserInternal.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.sakaiproject.nakamura.lite.authorizable; - -import org.sakaiproject.nakamura.api.lite.authorizable.User; - -import java.util.Map; - -public class UserInternal extends User { - - public UserInternal(Map groupMap, boolean objectIsNew) { - super(groupMap); - setObjectNew(objectIsNew); - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/content/ContentManagerImpl.java b/src/main/java/org/sakaiproject/nakamura/lite/content/ContentManagerImpl.java deleted file mode 100644 index 7113086f..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/content/ContentManagerImpl.java +++ /dev/null @@ -1,694 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license 
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.sakaiproject.nakamura.lite.content; - -import static org.sakaiproject.nakamura.lite.content.InternalContent.BLOCKID_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.BODY_CREATED; -import static org.sakaiproject.nakamura.lite.content.InternalContent.BODY_CREATED_BY; -import static org.sakaiproject.nakamura.lite.content.InternalContent.BODY_LAST_MODIFIED; -import static org.sakaiproject.nakamura.lite.content.InternalContent.BODY_LAST_MODIFIED_BY; -import static org.sakaiproject.nakamura.lite.content.InternalContent.COPIED_DEEP; -import static org.sakaiproject.nakamura.lite.content.InternalContent.COPIED_FROM_ID; -import static org.sakaiproject.nakamura.lite.content.InternalContent.COPIED_FROM_PATH; -import static org.sakaiproject.nakamura.lite.content.InternalContent.CREATED; -import static org.sakaiproject.nakamura.lite.content.InternalContent.CREATED_BY; -import static org.sakaiproject.nakamura.lite.content.InternalContent.DELETED_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.LASTMODIFIED; -import static org.sakaiproject.nakamura.lite.content.InternalContent.LASTMODIFIED_BY; -import static org.sakaiproject.nakamura.lite.content.InternalContent.LENGTH_FIELD; -import static 
org.sakaiproject.nakamura.lite.content.InternalContent.LINKED_PATH_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.NEXT_VERSION_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.PATH_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.PREVIOUS_BLOCKID_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.PREVIOUS_VERSION_UUID_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.READONLY_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.STRUCTURE_UUID_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.TRUE; -import static org.sakaiproject.nakamura.lite.content.InternalContent.UUID_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.VERSION_HISTORY_ID_FIELD; -import static org.sakaiproject.nakamura.lite.content.InternalContent.VERSION_NUMBER; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; - -import org.sakaiproject.nakamura.api.lite.CacheHolder; -import org.sakaiproject.nakamura.api.lite.Configuration; -import org.sakaiproject.nakamura.api.lite.RemoveProperty; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.api.lite.StoreListener; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessControlManager; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; -import org.sakaiproject.nakamura.api.lite.content.Content; -import org.sakaiproject.nakamura.api.lite.content.ContentManager; -import 
org.sakaiproject.nakamura.lite.CachingManager; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -/** - *
    - * Content Manager.
    - * Manages two types of content,
    - * Bundles of content properties and bodies.
    - * Bodies are chunked into sizes to aide efficiency when retrieving the content.
    - * 
    - * CF content stores the structure of the content keyed by path.
    - * Each item contains child names in columns + the guid of the item
    - * eg
    - *   path : {
    - *       ':id' : thisitemUUID,
    - *       subitemA : subitemAUUID,
    - *       subitemB : subitemBUUID
    - *   }
    - * the guid of the item points to the CF content version where items are keyed by the version.
    - * These items also contain child nodes under children as an array
    - * 
    - * eg
    - *    itemUUID : {
    - *         'id' : thisitemUUID
    - *         'children' : [ 
    - *           subitemA : subitemAUUID,
    - *           subitemB : subitemBUUID
    - *         ],
    - *         'nblocks' = numberOfBlocksSetsOfContent
    - *         'length' = totalLenghtOftheContent
    - *         'blocksize' = storageBlockSize
    - *         'blockid' = blockID
    - *         ... other properties ...
    - *    }
    - *    
    - * The content blocks are stored in CF content body
    - * eg
    - *   blockID:blockSetNumber : {
    - *         'id' : blockID,
    - *         'numblocks' : numberOfBlocksInThisSet,
    - *         'blocklength0' : lengthOfThisBlock,
    - *         'body0' : byte[]
    - *         'blocklength1' : lengthOfThisBlock,
    - *         'body1' : byte[]
    - *         ...
    - *         'blocklengthn' : lengthOfThisBlock,
    - *         'bodyn' : byte[]
    - *    }
    - * 
    - * 
    - * Versioning:
    - * 
    - * When a version is saved, the CF contentVersion item is cloned and the CF content :id and any subitems IDs are updated.
    - * Block 0 is marked as readonly
    - * 
    - * When the body is written to its CF content row is checked to see if the block is read only. If so a new block is created with and linked in with 'previousversion'
    - * A version object is also created to keep track of the versions.
    - * 
    - * 
    - * - * @author ieb - * - */ -public class ContentManagerImpl extends CachingManager implements ContentManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(ContentManagerImpl.class); - - /** - * Key containing deleted items. - */ - private static final String DELETEDITEMS_KEY = ":deleteditems"; - - private static final Set DEEP_COPY_FILTER = ImmutableSet.of(LASTMODIFIED, - LASTMODIFIED_BY, UUID_FIELD, PATH_FIELD); - - /** - * Storage Client - */ - private StorageClient client; - /** - * The access control manager in use. - */ - private AccessControlManager accessControlManager; - /** - * Key space for this content. - */ - private String keySpace; - /** - * Column Family for this content. - */ - private String contentColumnFamily; - - private boolean closed; - - private StoreListener eventListener; - - public ContentManagerImpl(StorageClient client, AccessControlManager accessControlManager, - Configuration config, Map sharedCache, StoreListener eventListener) { - super(client, sharedCache); - this.client = client; - this.accessControlManager = accessControlManager; - keySpace = config.getKeySpace(); - contentColumnFamily = config.getContentColumnFamily(); - closed = false; - this.eventListener = eventListener; - } - - // TODO: Unit test - public boolean exists(String path) { - try { - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); - Map structure = getCached(keySpace, contentColumnFamily, path); - return (structure != null && structure.size() > 0); - } catch (AccessDeniedException e) { - LOGGER.debug(e.getMessage(), e); - } catch (StorageClientException e) { - LOGGER.debug(e.getMessage(), e); - } - return false; - } - - public Content get(String path) throws StorageClientException, AccessDeniedException { - checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); - Map structure = getCached(keySpace, contentColumnFamily, path); - if (structure != null && structure.size() 
> 0) { - String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); - Map content = getCached(keySpace, contentColumnFamily, contentId); - if (content != null && content.size() > 0) { - Content contentObject = new Content(path, content); - ((InternalContent) contentObject).internalize(structure, this, false); - return contentObject; - } - } - return null; - - } - - public void update(Content excontent) throws AccessDeniedException, StorageClientException { - checkOpen(); - InternalContent content = (InternalContent) excontent; - String path = content.getPath(); - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_WRITE); - String id = null; - Map toSave = null; - if (content.isNew()) { - // create the parents if necessary - if (!StorageClientUtils.isRoot(path)) { - String parentPath = StorageClientUtils.getParentObjectPath(path); - Content parentContent = get(parentPath); - if (parentContent == null) { - update(new Content(parentPath, null)); - } - } - toSave = Maps.newHashMap(content.getPropertiesForUpdate()); - id = StorageClientUtils.getUuid(); - toSave.put(UUID_FIELD, id); - toSave.put(PATH_FIELD, path); - toSave.put(CREATED, System.currentTimeMillis()); - toSave.put(CREATED_BY, - accessControlManager.getCurrentUserId()); - toSave.put(LASTMODIFIED, System.currentTimeMillis()); - toSave.put(LASTMODIFIED_BY, - accessControlManager.getCurrentUserId()); - LOGGER.debug("New Content with {} {} ", id, toSave); - } else if (content.isUpdated()) { - toSave = Maps.newHashMap(content.getPropertiesForUpdate()); - id = (String)toSave.get(UUID_FIELD); - toSave.put(LASTMODIFIED, System.currentTimeMillis()); - toSave.put(LASTMODIFIED_BY, - accessControlManager.getCurrentUserId()); - LOGGER.debug("Updating Content with {} {} ", id, toSave); - } else { - // if not new or updated, dont update. 
- return; - } - - Map checkContent = getCached(keySpace, contentColumnFamily, id); - if (TRUE.equals((String)checkContent.get(READONLY_FIELD))) { - throw new AccessDeniedException(Security.ZONE_CONTENT, path, - "update on read only Content Item (possibly a previous version of the item)", - accessControlManager.getCurrentUserId()); - } - boolean isnew = false; - if (content.isNew()) { - isnew = true; - // only when new do we update the structure. - if (!StorageClientUtils.isRoot(path)) { - putCached(keySpace, contentColumnFamily, - StorageClientUtils.getParentObjectPath(path), - ImmutableMap.of(StorageClientUtils.getObjectName(path), (Object)id), true); - } - putCached(keySpace, contentColumnFamily, path, - ImmutableMap.of(STRUCTURE_UUID_FIELD, (Object)id), true); - } - // save the content id. - putCached(keySpace, contentColumnFamily, id, toSave, isnew); - LOGGER.debug("Saved {} at {} as {} ", new Object[] { path, id, toSave }); - // reset state to unmodified to take further modifications. 
- content.reset(getCached(keySpace, contentColumnFamily, id)); - eventListener.onUpdate(Security.ZONE_CONTENT, path, accessControlManager.getCurrentUserId(), isnew, "op:update"); - } - - public void delete(String path) throws AccessDeniedException, StorageClientException { - checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_DELETE); - Map structure = getCached(keySpace, contentColumnFamily, path); - if ( structure != null && structure.size() > 0 ) { - String uuid = (String)structure.get(STRUCTURE_UUID_FIELD); - removeFromCache(keySpace, contentColumnFamily, path); - client.remove(keySpace, contentColumnFamily, path); - Map m = new HashMap(); - m.put(StorageClientUtils.getObjectName(path), null); - putCached(keySpace, contentColumnFamily, StorageClientUtils.getParentObjectPath(path), - m, false); - putCached(keySpace, contentColumnFamily, uuid, - ImmutableMap.of(DELETED_FIELD, (Object) TRUE), false); - putCached(keySpace, contentColumnFamily, DELETEDITEMS_KEY, - ImmutableMap.of(uuid, (Object)path), false); - eventListener.onDelete(Security.ZONE_CONTENT, path, accessControlManager.getCurrentUserId()); - } - } - - public long writeBody(String path, InputStream in) throws StorageClientException, - AccessDeniedException, IOException { - return writeBody(path, in, null); - } - - public long writeBody(String path, InputStream in, String streamId) - throws StorageClientException, AccessDeniedException, IOException { - checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_WRITE); - Map structure = getCached(keySpace, contentColumnFamily, path); - String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); - Map content = getCached(keySpace, contentColumnFamily, contentId); - boolean isnew = true; - String blockIdField = StorageClientUtils.getAltField(BLOCKID_FIELD, streamId); - if (content.containsKey(blockIdField)) { - isnew = false; - } - String contentBlockId = StorageClientUtils.getUuid(); - - Map 
metadata = client.streamBodyIn(keySpace, contentColumnFamily, - contentId, contentBlockId, streamId, content, in); - metadata.put(StorageClientUtils.getAltField(BODY_LAST_MODIFIED, streamId), - System.currentTimeMillis()); - metadata.put(StorageClientUtils.getAltField(BODY_LAST_MODIFIED_BY, streamId), - accessControlManager.getCurrentUserId()); - if (isnew) { - metadata.put(StorageClientUtils.getAltField(BODY_CREATED, streamId), - System.currentTimeMillis()); - metadata.put(StorageClientUtils.getAltField(BODY_CREATED_BY, streamId), - accessControlManager.getCurrentUserId()); - } - putCached(keySpace, contentColumnFamily, contentId, metadata, isnew); - long length = (Long) metadata.get(LENGTH_FIELD); - eventListener.onUpdate(Security.ZONE_CONTENT, path, accessControlManager.getCurrentUserId(), false, "stream", streamId); - return length; - - } - - public InputStream getInputStream(String path) throws StorageClientException, - AccessDeniedException, IOException { - return getInputStream(path, null); - } - - public InputStream getInputStream(String path, String streamId) throws StorageClientException, - AccessDeniedException, IOException { - checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); - Map structure = getCached(keySpace, contentColumnFamily, path); - LOGGER.debug("Structure Loaded {} {} ", path, structure); - String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); - return internalGetInputStream(contentId, streamId); - } - - private InputStream internalGetInputStream(String contentId, String streamId) - throws StorageClientException, AccessDeniedException, IOException { - Map content = getCached(keySpace, contentColumnFamily, contentId); - String contentBlockId = (String)content.get(StorageClientUtils - .getAltField(BLOCKID_FIELD, streamId)); - return client.streamBodyOut(keySpace, contentColumnFamily, contentId, contentBlockId, streamId, - content); - } - - public void close() { - closed = true; - } - - 
private void checkOpen() throws StorageClientException { - if (closed) { - throw new StorageClientException("Content Manager is closed"); - } - } - - // TODO: Unit test - public void copy(String from, String to, boolean deep) throws StorageClientException, - AccessDeniedException, IOException { - checkOpen(); - // To Copy, get the to object out and copy everything over. - Content f = get(from); - if (f == null) { - throw new StorageClientException(" Source content " + from + " does not exist"); - } - Content t = get(to); - if (t != null) { - delete(to); - } - Set streams = Sets.newHashSet(); - Map copyProperties = Maps.newHashMap(); - if (deep) { - for (Entry p : f.getProperties().entrySet()) { - if (!DEEP_COPY_FILTER.contains(p.getKey())) { - if (p.getKey().startsWith(BLOCKID_FIELD)) { - streams.add(p.getKey()); - } else { - copyProperties.put(p.getKey(), p.getValue()); - } - } - } - } else { - copyProperties.putAll(f.getProperties()); - } - copyProperties.put(COPIED_FROM_PATH, from); - copyProperties.put(COPIED_FROM_ID, f.getProperty(UUID_FIELD)); - copyProperties.put(COPIED_DEEP, deep); - t = new Content(to, copyProperties); - update(t); - - for (String stream : streams) { - String streamId = null; - if (stream.length() > BLOCKID_FIELD.length()) { - streamId = stream.substring(BLOCKID_FIELD.length() + 1); - } - InputStream fromStream = getInputStream(from, streamId); - writeBody(to, fromStream); - fromStream.close(); - } - eventListener.onUpdate(Security.ZONE_CONTENT, to, accessControlManager.getCurrentUserId(), true, "op:copy"); - - } - - // TODO: Unit test - public void move(String from, String to) throws AccessDeniedException, StorageClientException { - // to move, get the structure object out and modify, recreating parent - // objects as necessary. 
- checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, from, Permissions.CAN_ANYTHING); - accessControlManager.check(Security.ZONE_CONTENT, to, - Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); - Map fromStructure = getCached(keySpace, contentColumnFamily, from); - if (fromStructure == null || fromStructure.size() == 0) { - throw new StorageClientException("The source content to move from " + from - + " does not exist, move operation failed"); - } - Map toStructure = getCached(keySpace, contentColumnFamily, to); - if (toStructure != null && toStructure.size() > 0) { - throw new StorageClientException("The destination content to move to " + to - + " exists, move operation failed"); - } - String idStore = (String) fromStructure.get(STRUCTURE_UUID_FIELD); - - // move the conent to the new location, then delete the old. - if (!StorageClientUtils.isRoot(to)) { - // if not a root, modify the new parent location, creating the - // structured if necessary - String parent = StorageClientUtils.getParentObjectPath(to); - Map parentToStructure = getCached(keySpace, contentColumnFamily, - parent); - if (parentToStructure == null || parentToStructure.size() == 0) { - // create a new parent - Content content = new Content(parent, null); - update(content); - } - - putCached(keySpace, contentColumnFamily, parent, - ImmutableMap.of(StorageClientUtils.getObjectName(to),(Object)idStore), true); - } - // update the content data to reflect the new primary location. - putCached(keySpace, contentColumnFamily, idStore, - ImmutableMap.of(PATH_FIELD, (Object)to), false); - - // insert the new to Structure and remove the from - putCached(keySpace, contentColumnFamily, to, fromStructure, true); - - // now remove the old location. - if (!StorageClientUtils.isRoot(from)) { - // if it was not a root, then modify the old parent location. 
- String fromParent = StorageClientUtils.getParentObjectPath(from); - putCached(keySpace, contentColumnFamily, fromParent, ImmutableMap.of( - StorageClientUtils.getObjectName(from), (Object) new RemoveProperty()), false); - } - // remove the old from. - removeFromCache(keySpace, contentColumnFamily, from); - client.remove(keySpace, contentColumnFamily, from); - eventListener.onDelete(Security.ZONE_CONTENT, from, accessControlManager.getCurrentUserId(), "op:move"); - eventListener.onUpdate(Security.ZONE_CONTENT, to, accessControlManager.getCurrentUserId(), true, "op:move"); - - } - - // TODO: Unit test - public void link(String from, String to) throws AccessDeniedException, StorageClientException { - // a link places a pointer to the content in the parent of from, but - // does not delete or modify the structure of to. - // read from is required and write to. - checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, to, Permissions.CAN_READ); - accessControlManager.check(Security.ZONE_CONTENT, from, - Permissions.CAN_READ.combine(Permissions.CAN_WRITE)); - Map toStructure = getCached(keySpace, contentColumnFamily, to); - if (toStructure == null || toStructure.size() == 0) { - throw new StorageClientException("The source content to link from " + to - + " does not exist, link operation failed"); - } - Map fromStructure = getCached(keySpace, contentColumnFamily, from); - if (fromStructure != null && fromStructure.size() > 0) { - throw new StorageClientException("The destination content to link to " + from - + " exists, link operation failed"); - } - - if (StorageClientUtils.isRoot(from)) { - throw new StorageClientException("The link " + to - + " is a root, not possible to create a soft link"); - } - - // create a new structure object pointing back to the shared location - - Object idStore = toStructure.get(STRUCTURE_UUID_FIELD); - // if not a root, modify the new parent location, creating the - // structured if necessary - String parent = 
StorageClientUtils.getParentObjectPath(from); - Map parentToStructure = getCached(keySpace, contentColumnFamily, parent); - if (parentToStructure == null || parentToStructure.size() == 0) { - // create a new parent - Content content = new Content(parent, null); - update(content); - } - - putCached(keySpace, contentColumnFamily, parent, - ImmutableMap.of(StorageClientUtils.getObjectName(from), idStore), false); - // create the new object for the path, pointing to the Object - putCached(keySpace, contentColumnFamily, from, ImmutableMap.of(STRUCTURE_UUID_FIELD, - idStore, LINKED_PATH_FIELD, to), true); - - } - - public String saveVersion(String path) throws StorageClientException, AccessDeniedException { - checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_WRITE); - Map structure = getCached(keySpace, contentColumnFamily, path); - String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); - Map saveVersion = getCached(keySpace, contentColumnFamily, contentId); - - // versionHistoryId is the UUID of the version history for this node. 
- - String saveVersionId = (String)saveVersion.get(UUID_FIELD); - - String versionHistoryId = (String)saveVersion.get(VERSION_HISTORY_ID_FIELD); - - if (versionHistoryId == null) { - versionHistoryId = StorageClientUtils.getUuid(); - LOGGER.debug("Created new Version History UUID as {} for Object {} ",versionHistoryId, saveVersionId); - saveVersion.put(VERSION_HISTORY_ID_FIELD, versionHistoryId); - } else { - LOGGER.debug("Created new Version History UUID as {} for Object {} ",versionHistoryId, saveVersionId); - - } - - Map newVersion = Maps.newHashMap(saveVersion); - String newVersionId = StorageClientUtils.getUuid(); - - - String saveBlockId = (String)saveVersion.get(BLOCKID_FIELD); - - newVersion.put(UUID_FIELD, newVersionId); - newVersion.put(PREVIOUS_VERSION_UUID_FIELD, saveVersionId); - if (saveBlockId != null) { - newVersion.put(PREVIOUS_BLOCKID_FIELD, saveBlockId); - } - - saveVersion.put(NEXT_VERSION_FIELD, newVersionId); - saveVersion.put(READONLY_FIELD, TRUE); - Object versionNumber = System.currentTimeMillis(); - saveVersion.put(VERSION_NUMBER, versionNumber); - - putCached(keySpace, contentColumnFamily, saveVersionId, saveVersion, false); - putCached(keySpace, contentColumnFamily, newVersionId, newVersion, true); - putCached(keySpace, contentColumnFamily, versionHistoryId, - ImmutableMap.of(saveVersionId, versionNumber), true); - putCached(keySpace, contentColumnFamily, path, - ImmutableMap.of(STRUCTURE_UUID_FIELD, (Object)newVersionId), true); - if (!path.equals("/")) { - putCached(keySpace, contentColumnFamily, - StorageClientUtils.getParentObjectPath(path), - ImmutableMap.of(StorageClientUtils.getObjectName(path), (Object)newVersionId), true); - } - if ( LOGGER.isDebugEnabled() ) { - LOGGER.debug("Saved Version History {} {} ", versionHistoryId, - getCached(keySpace, contentColumnFamily, versionHistoryId)); - LOGGER.debug("Saved Version [{}] {}", saveVersionId, saveVersion); - LOGGER.debug("New Version [{}] {}", newVersionId, newVersion); - 
LOGGER.debug("Structure {} ", getCached(keySpace, contentColumnFamily, path)); - LOGGER.debug( - "Parent Structure {} ", - getCached(keySpace, contentColumnFamily, - StorageClientUtils.getParentObjectPath(path))); - } - return saveVersionId; - } - - public List getVersionHistory(String path) throws AccessDeniedException, - StorageClientException { - checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); - Map structure = getCached(keySpace, contentColumnFamily, path); - if (structure != null && structure.size() > 0) { - String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); - Map content = getCached(keySpace, contentColumnFamily, contentId); - if (content != null && content.size() > 0) { - String versionHistoryId = (String)content - .get(VERSION_HISTORY_ID_FIELD); - if (versionHistoryId != null) { - final Map versionHistory = getCached(keySpace, - contentColumnFamily, versionHistoryId); - LOGGER.debug("Loaded Version History {} {} ", versionHistoryId, versionHistory); - return Lists.sortedCopy(versionHistory.keySet(), new Comparator() { - public int compare(String o1, String o2) { - long l1 = (Long) versionHistory.get(o1); - long l2 = (Long) versionHistory.get(o2); - long r = l2 - l1; - if (r == 0) { - return 0; - } else if (r < 0) { - return -1; - } - return 1; - } - }); - - } - } - } - return Collections.emptyList(); - } - - // TODO: Unit test - public InputStream getVersionInputStream(String path, String versionId) - throws AccessDeniedException, StorageClientException, IOException { - return getVersionInputStream(path, versionId, null); - } - - // TODO: Unit test - public InputStream getVersionInputStream(String path, String versionId, String streamId) - throws AccessDeniedException, StorageClientException, IOException { - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); - checkOpen(); - Map structure = getCached(keySpace, contentColumnFamily, path); - if (structure != null && 
structure.size() > 0) { - String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); - Map content = getCached(keySpace, contentColumnFamily, contentId); - if (content != null && content.size() > 0) { - String versionHistoryId = (String)content - .get(VERSION_HISTORY_ID_FIELD); - if (versionHistoryId != null) { - Map versionHistory = getCached(keySpace, contentColumnFamily, - versionHistoryId); - if (versionHistory != null && versionHistory.containsKey(versionId)) { - return internalGetInputStream(versionId, streamId); - } - } - } - } - return null; - } - - public Content getVersion(String path, String versionId) throws StorageClientException, - AccessDeniedException { - checkOpen(); - accessControlManager.check(Security.ZONE_CONTENT, path, Permissions.CAN_READ); - Map structure = getCached(keySpace, contentColumnFamily, path); - if (structure != null && structure.size() > 0) { - String contentId = (String)structure.get(STRUCTURE_UUID_FIELD); - Map content = getCached(keySpace, contentColumnFamily, contentId); - if (content != null && content.size() > 0) { - String versionHistoryId = (String)content - .get(VERSION_HISTORY_ID_FIELD); - if (versionHistoryId != null) { - Map versionHistory = getCached(keySpace, contentColumnFamily, - versionHistoryId); - if (versionHistory != null && versionHistory.containsKey(versionId)) { - Map versionContent = getCached(keySpace, - contentColumnFamily, versionId); - if (versionContent != null && versionContent.size() > 0) { - Content contentObject = new Content(path, versionContent); - ((InternalContent) contentObject).internalize(structure, this, true); - return contentObject; - } else { - LOGGER.debug("No Content for path {} version History Null{} ", path, - versionHistoryId); - - } - } else { - LOGGER.debug("History null for path {} version History {} {} ", - new Object[] { path, versionHistoryId, versionHistory }); - } - } else { - LOGGER.debug("History Id null for path {} ", path); - } - } - } - return null; - } - - 
@Override - protected Logger getLogger() { - return LOGGER; - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/StorageClient.java b/src/main/java/org/sakaiproject/nakamura/lite/storage/StorageClient.java deleted file mode 100644 index 0422698d..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/StorageClient.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.lite.storage; - -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Iterator; -import java.util.Map; - -public interface StorageClient { - - Map get(String keySpace, String columnFamily, String key) - throws StorageClientException; - - void insert(String keySpace, String columnFamily, String key, Map values, boolean probablyNew) - throws StorageClientException; - - void remove(String keySpace, String columnFamily, String key) throws StorageClientException; - - InputStream streamBodyOut(String keySpace, String columnFamily, String contentId, - String contentBlockId, String streamId, Map content) throws StorageClientException, - AccessDeniedException, IOException; - - Map streamBodyIn(String keySpace, String columnFamily, String contentId, - String contentBlockId, String streamId, Map content, InputStream in) - throws StorageClientException, AccessDeniedException, IOException; - - Iterator> find(String keySpace, String authorizableColumnFamily, - Map properties) throws StorageClientException; - - void close(); - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClient.java b/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClient.java deleted file mode 100644 index a7f6d7cb..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/cassandra/CassandraClient.java +++ /dev/null @@ -1,258 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.sakaiproject.nakamura.lite.storage.cassandra; - -import com.google.common.collect.Lists; - -import org.apache.cassandra.thrift.Cassandra.Client; -import org.apache.cassandra.thrift.Column; -import org.apache.cassandra.thrift.ColumnOrSuperColumn; -import org.apache.cassandra.thrift.ColumnParent; -import org.apache.cassandra.thrift.ColumnPath; -import org.apache.cassandra.thrift.ConsistencyLevel; -import org.apache.cassandra.thrift.Deletion; -import org.apache.cassandra.thrift.InvalidRequestException; -import org.apache.cassandra.thrift.Mutation; -import org.apache.cassandra.thrift.SlicePredicate; -import org.apache.cassandra.thrift.SliceRange; -import org.apache.cassandra.thrift.SuperColumn; -import org.apache.cassandra.thrift.TimedOutException; -import org.apache.cassandra.thrift.UnavailableException; -import org.apache.thrift.TException; -import org.apache.thrift.protocol.TProtocol; -import org.apache.thrift.transport.TSocket; -import org.apache.thrift.transport.TTransportException; -import org.sakaiproject.nakamura.api.lite.RemoveProperty; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.content.Content; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.content.BlockSetContentHelper; -import org.sakaiproject.nakamura.lite.storage.DisposableIterator; -import 
org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -public class CassandraClient extends Client implements StorageClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(CassandraClient.class); - public static final String CONFIG_BLOCK_SIZE = "block-size"; - public static final String CONFIG_MAX_CHUNKS_PER_BLOCK = "chunks-per-block"; - - private static final int DEFAULT_BLOCK_SIZE = 1024 * 1024; - private static final int DEFAULT_MAX_CHUNKS_PER_BLOCK = 64; - - private TSocket tSocket; - private BlockContentHelper contentHelper; - private int blockSize; - private int maxChunksPerBlockSet; - private CassandraClientPool pool; - - public CassandraClient(CassandraClientPool pool, TProtocol tProtocol, TSocket tSocket, - Map properties) { - super(tProtocol); - this.tSocket = tSocket; - this.pool = pool; - contentHelper = new BlockSetContentHelper(this); - blockSize = StorageClientUtils.getSetting(properties.get(CONFIG_BLOCK_SIZE), - DEFAULT_BLOCK_SIZE); - maxChunksPerBlockSet = StorageClientUtils.getSetting( - properties.get(CONFIG_MAX_CHUNKS_PER_BLOCK), DEFAULT_MAX_CHUNKS_PER_BLOCK); - - } - - public void close() { - pool.releaseClient(this); - } - - public void destroy() { - try { - if (tSocket.isOpen()) { - tSocket.flush(); - tSocket.close(); - } - } catch (TTransportException e) { - LOGGER.error("Failed to close the connection to the cassandra store.", e); - } - } - - public void passivate() { - } - - public void activate() { - } - - public void validate() throws TException { - describe_version(); - } - - public Map get(String keySpace, String columnFamily, String key) - throws StorageClientException { - try { - Map row = new HashMap(); - - SlicePredicate predicate = new SlicePredicate(); - 
SliceRange sliceRange = new SliceRange(); - sliceRange.setStart(new byte[0]); - sliceRange.setFinish(new byte[0]); - predicate.setSlice_range(sliceRange); - - ColumnParent parent = new ColumnParent(columnFamily); - List results = get_slice(keySpace, key, parent, predicate, - ConsistencyLevel.ONE); - for (ColumnOrSuperColumn result : results) { - if (result.isSetSuper_column()) { - Map sc = new HashMap(); - for (Column column : result.super_column.columns) { - sc.put(StorageClientUtils.toString(column.name), column.value); - } - row.put(StorageClientUtils.toString(result.super_column.name), sc); - } else { - row.put(StorageClientUtils.toString(result.column.name), result.column.value); - } - } - return row; - } catch (InvalidRequestException e) { - throw new StorageClientException(e.getMessage(), e); - } catch (UnavailableException e) { - throw new StorageClientException(e.getMessage(), e); - } catch (TimedOutException e) { - throw new StorageClientException(e.getMessage(), e); - } catch (TException e) { - throw new StorageClientException(e.getMessage(), e); - } - } - - public void insert(String keySpace, String columnFamily, String key, Map values, boolean probablyNew) - throws StorageClientException { - try { - Map>> mutation = new HashMap>>(); - Map> columnMutations = new HashMap>(); - LOGGER.debug("Saving changes to {}:{}:{} ", - new Object[] { keySpace, columnFamily, key }); - List keyMutations = Lists.newArrayList(); - columnMutations.put(columnFamily, keyMutations); - mutation.put(key, columnMutations); - for (Entry value : values.entrySet()) { - String name = value.getKey(); - byte[] bname = StorageClientUtils.toBytes(name); - Object v = value.getValue(); - if (v instanceof RemoveProperty) { - Deletion deletion = new Deletion(); - SlicePredicate deletionPredicate = new SlicePredicate(); - deletionPredicate.addToColumn_names(bname); - deletion.setPredicate(deletionPredicate); - Mutation mu = new Mutation(); - mu.setDeletion(deletion); - keyMutations.add(mu); 
- } else if (v instanceof byte[]) { - byte[] bv = (byte[]) v; - Column column = new Column(bname, bv, System.currentTimeMillis()); - ColumnOrSuperColumn csc = new ColumnOrSuperColumn(); - csc.setColumn(column); - Mutation mu = new Mutation(); - mu.setColumn_or_supercolumn(csc); - keyMutations.add(mu); - } else if (v instanceof Map) { - @SuppressWarnings("unchecked") - Map sc = (Map) v; - List columns = new ArrayList(); - for (Entry sce : sc.entrySet()) { - String cname = sce.getKey(); - byte[] bcname = StorageClientUtils.toBytes(cname); - Column column = new Column(bcname, StorageClientUtils.toBytes(sce - .getValue()), System.currentTimeMillis()); - columns.add(column); - } - - SuperColumn superColumn = new SuperColumn(bname, columns); - ColumnOrSuperColumn csc = new ColumnOrSuperColumn(); - csc.setSuper_column(superColumn); - Mutation mu = new Mutation(); - mu.setColumn_or_supercolumn(csc); - keyMutations.add(mu); - } else { - byte[] bv = StorageClientUtils.toBytes(v); - Column column = new Column(bname, bv, System.currentTimeMillis()); - ColumnOrSuperColumn csc = new ColumnOrSuperColumn(); - csc.setColumn(column); - Mutation mu = new Mutation(); - mu.setColumn_or_supercolumn(csc); - keyMutations.add(mu); - } - } - LOGGER.debug("Mutation {} ", mutation); - batch_mutate(keySpace, mutation, ConsistencyLevel.ONE); - } catch (InvalidRequestException e) { - throw new StorageClientException(e.getMessage(), e); - } catch (UnavailableException e) { - throw new StorageClientException(e.getMessage(), e); - } catch (TimedOutException e) { - throw new StorageClientException(e.getMessage(), e); - } catch (TException e) { - throw new StorageClientException(e.getMessage(), e); - } - } - - public void remove(String keySpace, String columnFamily, String key) - throws StorageClientException { - ColumnPath cp = new ColumnPath(columnFamily); - try { - remove(keySpace, key, cp, System.currentTimeMillis(), ConsistencyLevel.ONE); - } catch (InvalidRequestException e) { - throw new 
StorageClientException(e.getMessage(), e); - } catch (UnavailableException e) { - throw new StorageClientException(e.getMessage(), e); - } catch (TimedOutException e) { - throw new StorageClientException(e.getMessage(), e); - } catch (TException e) { - throw new StorageClientException(e.getMessage(), e); - } - } - - public Map streamBodyIn(String keySpace, String contentColumnFamily, - String contentId, String contentBlockId, String streamId, Map content, InputStream in) - throws StorageClientException, AccessDeniedException, IOException { - return contentHelper.writeBody(keySpace, contentColumnFamily, contentId, contentBlockId, streamId, - blockSize, maxChunksPerBlockSet, in); - } - - public InputStream streamBodyOut(String keySpace, String contentColumnFamily, String contentId, - String contentBlockId, String streamId, Map content) throws StorageClientException, - AccessDeniedException { - - int nBlocks = StorageClientUtils.toInt(content.get(Content.NBLOCKS_FIELD)); - return contentHelper.readBody(keySpace, contentColumnFamily, contentBlockId, streamId, nBlocks); - } - - public DisposableIterator> find(String keySpace, - String authorizableColumnFamily, Map properties) { - // TODO: Implement - throw new UnsupportedOperationException(); - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionHolder.java b/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionHolder.java deleted file mode 100644 index 9589466f..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/ConnectionHolder.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.sakaiproject.nakamura.lite.storage.jdbc; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.SQLException; - -public class ConnectionHolder { - - private static final Logger LOGGER = LoggerFactory.getLogger(ConnectionHolder.class); - private static final long TTL = 3600000L; - private Connection connection; - private long lastUsed; - - public ConnectionHolder(Connection connection) { - this.lastUsed = System.currentTimeMillis(); - this.connection = connection; - } - - public void ping() { - lastUsed = System.currentTimeMillis(); - } - - public boolean hasExpired() { - return (System.currentTimeMillis() > lastUsed + TTL); - } - - public Connection get() { - return connection; - } - - public void close() { - if (connection != null) { - try { - connection.close(); - } catch (SQLException e) { - LOGGER.debug("Failed to close connection " + e.getMessage(), e); - } - } - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClient.java b/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClient.java deleted file mode 100644 index 0b81b3fe..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/jdbc/JDBCStorageClient.java +++ /dev/null @@ -1,930 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.sakaiproject.nakamura.lite.storage.jdbc; - -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; - -import org.apache.commons.lang.StringUtils; -import org.sakaiproject.nakamura.api.lite.ClientPoolException; -import org.sakaiproject.nakamura.api.lite.RemoveProperty; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.util.PreemptiveIterator; -import org.sakaiproject.nakamura.lite.content.FileStreamContentHelper; -import org.sakaiproject.nakamura.lite.content.StreamedContentHelper; -import org.sakaiproject.nakamura.lite.storage.Disposable; -import org.sakaiproject.nakamura.lite.storage.DisposableIterator; -import org.sakaiproject.nakamura.lite.storage.RowHasher; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.sakaiproject.nakamura.lite.types.Types; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.UnsupportedEncodingException; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import 
java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.text.MessageFormat; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; - -public class JDBCStorageClient implements StorageClient, RowHasher { - - private static final Logger LOGGER = LoggerFactory.getLogger(JDBCStorageClient.class); - private static final String SQL_VALIDATE = "validate"; - private static final String SQL_CHECKSCHEMA = "check-schema"; - private static final String SQL_COMMENT = "#"; - private static final String SQL_EOL = ";"; - private static final String SQL_DELETE_STRING_ROW = "delete-string-row"; - private static final String SQL_INSERT_STRING_COLUMN = "insert-string-column"; - private static final String SQL_UPDATE_STRING_COLUMN = "update-string-column"; - private static final String SQL_REMOVE_STRING_COLUMN = "remove-string-column"; - - private static final String SQL_BLOCK_DELETE_ROW = "block-delete-row"; - private static final String SQL_BLOCK_SELECT_ROW = "block-select-row"; - private static final String SQL_BLOCK_INSERT_ROW = "block-insert-row"; - private static final String SQL_BLOCK_UPDATE_ROW = "block-update-row"; - - private static final String SELECT_INDEX_COLUMNS = "select-index-columns"; - private static final String PROP_HASH_ALG = "rowid-hash"; - private static final String USE_BATCH_INSERTS = "use-batch-inserts"; - - private JDBCStorageClientPool jcbcStorageClientConnection; - private Map sqlConfig; - private boolean active; - private StreamedContentHelper streamedContentHelper; - private List toDispose = Lists.newArrayList(); - private Exception closed; - private Exception passivate; - private String rowidHash; - private Map counters = Maps.newConcurrentHashMap(); - private Set indexColumns; - - public JDBCStorageClient(JDBCStorageClientPool 
jdbcStorageClientConnectionPool, - Map properties, Map sqlConfig) throws SQLException, - NoSuchAlgorithmException, StorageClientException { - this.jcbcStorageClientConnection = jdbcStorageClientConnectionPool; - streamedContentHelper = new FileStreamContentHelper(this, properties); - - this.sqlConfig = sqlConfig; - rowidHash = getSql(PROP_HASH_ALG); - if (rowidHash == null) { - rowidHash = "MD5"; - } - active = true; - - } - - public Map get(String keySpace, String columnFamily, String key) - throws StorageClientException { - checkClosed(); - ResultSet body = null; - Map result = Maps.newHashMap(); - String rid = rowHash(keySpace, columnFamily, key); - PreparedStatement selectStringRow = null; - try { - selectStringRow = getStatement(keySpace, columnFamily, SQL_BLOCK_SELECT_ROW, rid, null); - inc("A"); - selectStringRow.clearWarnings(); - selectStringRow.clearParameters(); - selectStringRow.setString(1, rid); - body = selectStringRow.executeQuery(); - inc("B"); - if (body.next()) { - Types.loadFromStream(rid, result, body.getBinaryStream(1)); - } - } catch (SQLException e) { - LOGGER.warn("Failed to perform get operation on " + keySpace + ":" + columnFamily - + ":" + key, e); - if (passivate != null) { - LOGGER.warn("Was Pasivated ", passivate); - } - if (closed != null) { - LOGGER.warn("Was Closed ", closed); - } - throw new StorageClientException(e.getMessage(), e); - } catch (IOException e) { - LOGGER.warn("Failed to perform get operation on " + keySpace + ":" + columnFamily - + ":" + key, e); - if (passivate != null) { - LOGGER.warn("Was Pasivated ", passivate); - } - if (closed != null) { - LOGGER.warn("Was Closed ", closed); - } - throw new StorageClientException(e.getMessage(), e); - } finally { - close(body, "B"); - close(selectStringRow, "A"); - } - return result; - } - - public String rowHash(String keySpace, String columnFamily, String key) - throws StorageClientException { - MessageDigest hasher; - try { - hasher = MessageDigest.getInstance(rowidHash); 
- } catch (NoSuchAlgorithmException e1) { - throw new StorageClientException("Unable to get hash algorithm " + e1.getMessage(), e1); - } - String keystring = keySpace + ":" + columnFamily + ":" + key; - byte[] ridkey; - try { - ridkey = keystring.getBytes("UTF8"); - } catch (UnsupportedEncodingException e) { - ridkey = keystring.getBytes(); - } - return StorageClientUtils.encode(hasher.digest(ridkey), - StorageClientUtils.URL_SAFE_ENCODING); - } - - public void insert(String keySpace, String columnFamily, String key, Map values, boolean probablyNew) - throws StorageClientException { - checkClosed(); - Map statementCache = Maps.newHashMap(); - try { - String rid = rowHash(keySpace, columnFamily, key); - for (Entry e : values.entrySet()) { - String k = e.getKey(); - Object o = e.getValue(); - if (o instanceof byte[]) { - throw new RuntimeException("Invalid content in " + k - + ", storing byte[] rather than streaming it"); - } - } - - Map m = get(keySpace, columnFamily, key); - for (Entry e : values.entrySet()) { - String k = e.getKey(); - Object o = e.getValue(); - - if (o instanceof RemoveProperty || o == null) { - m.remove(k); - } else { - m.put(k, o); - } - } - if ( probablyNew ) { - PreparedStatement insertBlockRow = getStatement(keySpace, columnFamily, - SQL_BLOCK_INSERT_ROW, rid, statementCache); - insertBlockRow.clearWarnings(); - insertBlockRow.clearParameters(); - insertBlockRow.setString(1, rid); - insertBlockRow.setBinaryStream(2, Types.storeMapToStream(rid, m)); - int rowsInserted = 0; - try { - rowsInserted = insertBlockRow.executeUpdate(); - } catch ( SQLException e ) { - LOGGER.debug(e.getMessage(),e); - } - if ( rowsInserted == 0 ) { - PreparedStatement updateBlockRow = getStatement(keySpace, columnFamily, - SQL_BLOCK_UPDATE_ROW, rid, statementCache); - updateBlockRow.clearWarnings(); - updateBlockRow.clearParameters(); - updateBlockRow.setString(2, rid); - updateBlockRow.setBinaryStream(1, Types.storeMapToStream(rid, m)); - if( 
updateBlockRow.executeUpdate() == 0) { - throw new StorageClientException("Failed to save " + rid); - } else { - LOGGER.debug("Updated {} ", rid); - } - } else { - LOGGER.debug("Inserted {} ", rid); - } - } else { - PreparedStatement updateBlockRow = getStatement(keySpace, columnFamily, - SQL_BLOCK_UPDATE_ROW, rid, statementCache); - updateBlockRow.clearWarnings(); - updateBlockRow.clearParameters(); - updateBlockRow.setString(2, rid); - updateBlockRow.setBinaryStream(1, Types.storeMapToStream(rid, m)); - if (updateBlockRow.executeUpdate() == 0) { - PreparedStatement insertBlockRow = getStatement(keySpace, columnFamily, - SQL_BLOCK_INSERT_ROW, rid, statementCache); - insertBlockRow.clearWarnings(); - insertBlockRow.clearParameters(); - insertBlockRow.setString(1, rid); - insertBlockRow.setBinaryStream(2, Types.storeMapToStream(rid, m)); - if (insertBlockRow.executeUpdate() == 0) { - throw new StorageClientException("Failed to save " + rid); - } else { - LOGGER.debug("Inserted {} ", rid); - } - } else { - LOGGER.debug("Updated {} ", rid); - } - } - if ("1".equals(getSql(USE_BATCH_INSERTS))) { - Connection connection = jcbcStorageClientConnection.getConnection(); - boolean autoCommit = connection.getAutoCommit(); - connection.setAutoCommit(false); - Set updateSet = Sets.newHashSet(); - Map>> updateSequence = Maps - .newHashMap(); - Set removeSet = Sets.newHashSet(); - for (Entry e : values.entrySet()) { - String k = e.getKey(); - Object o = e.getValue(); - if (shouldIndex(keySpace, columnFamily, k)) { - if (o instanceof String) { - PreparedStatement updateStringColumn = getStatement(keySpace, - columnFamily, SQL_UPDATE_STRING_COLUMN, rid, statementCache); - updateStringColumn.setString(1, (String) o); - updateStringColumn.setString(2, rid); - updateStringColumn.setString(3, k); - updateStringColumn.addBatch(); - updateSet.add(updateStringColumn); - List> updateSeq = updateSequence - .get(updateStringColumn); - if (updateSeq == null) { - updateSeq = 
Lists.newArrayList(); - updateSequence.put(updateStringColumn, updateSeq); - } - updateSeq.add(e); - } else if (o instanceof RemoveProperty || o == null) { - PreparedStatement removeStringColumn = getStatement(keySpace, - columnFamily, SQL_REMOVE_STRING_COLUMN, rid, statementCache); - removeStringColumn.setString(1, rid); - removeStringColumn.setString(2, k); - removeStringColumn.addBatch(); - removeSet.add(removeStringColumn); - } - } - } - // execute the updates and add the necessary inserts. - Map>> insertSequence = Maps - .newHashMap(); - - Set insertSet = Sets.newHashSet(); - for (PreparedStatement pst : updateSet) { - int[] res = pst.executeBatch(); - List> updateSeq = updateSequence.get(pst); - for (int i = 0; i < res.length; i++) { - Entry e = updateSeq.get(i); - if (res[i] <= 0) { - String k = e.getKey(); - Object o = e.getValue(); - if (o instanceof String) { - PreparedStatement insertStringColumn = getStatement(keySpace, - columnFamily, SQL_INSERT_STRING_COLUMN, rid, statementCache); - insertStringColumn.setString(1, (String) o); - insertStringColumn.setString(2, rid); - insertStringColumn.setString(3, k); - insertStringColumn.addBatch(); - insertSet.add(insertStringColumn); - List> insertSeq = insertSequence - .get(insertStringColumn); - if (insertSeq == null) { - insertSeq = Lists.newArrayList(); - insertSequence.put(insertStringColumn, insertSeq); - } - insertSeq.add(e); - } - } else { - LOGGER.debug("Index updated for {} {} ", new Object[] { rid, e.getKey(), - e.getValue() }); - } - } - } - // execute the inserts and removes. 
- for (PreparedStatement pst : insertSet) { - int[] res = pst.executeBatch(); - List> insertSeq = insertSequence.get(pst); - for (int i = 0; i < res.length; i++ ) { - Entry e = insertSeq.get(i); - if ( res[i] <= 0 ) { - LOGGER.warn("Index failed for {} {} ", new Object[] { rid, e.getKey(), - e.getValue() }); - - } else { - LOGGER.debug("Index inserted for {} {} ", new Object[] { rid, e.getKey(), - e.getValue() }); - - } - } - } - for (PreparedStatement pst : removeSet) { - pst.executeBatch(); - } - if (autoCommit) { - connection.commit(); - connection.setAutoCommit(autoCommit); - } - - } else { - for (Entry e : values.entrySet()) { - String k = e.getKey(); - Object o = e.getValue(); - if (shouldIndex(keySpace, columnFamily, k)) { - if (o instanceof String) { - PreparedStatement updateStringColumn = getStatement(keySpace, - columnFamily, SQL_UPDATE_STRING_COLUMN, rid, statementCache); - updateStringColumn.clearWarnings(); - updateStringColumn.clearParameters(); - updateStringColumn.setString(1, (String) o); - updateStringColumn.setString(2, rid); - updateStringColumn.setString(3, k); - - if (updateStringColumn.executeUpdate() == 0) { - PreparedStatement insertStringColumn = getStatement(keySpace, - columnFamily, SQL_INSERT_STRING_COLUMN, rid, statementCache); - insertStringColumn.clearWarnings(); - insertStringColumn.clearParameters(); - insertStringColumn.setString(1, (String) o); - insertStringColumn.setString(2, rid); - insertStringColumn.setString(3, k); - if (insertStringColumn.executeUpdate() == 0) { - throw new StorageClientException("Failed to save " - + getRowId(keySpace, columnFamily, key) + " column:[" - + k + "] "); - } else { - LOGGER.debug("Inserted Index {} {} [{}]", - new Object[] { getRowId(keySpace, columnFamily, key), - k, o }); - } - } else { - LOGGER.debug( - "Updated Index {} {} [{}]", - new Object[] { getRowId(keySpace, columnFamily, key), k, o }); - } - } else if (o instanceof RemoveProperty || o == null) { - PreparedStatement 
removeStringColumn = getStatement(keySpace, - columnFamily, SQL_REMOVE_STRING_COLUMN, rid, statementCache); - removeStringColumn.clearWarnings(); - removeStringColumn.clearParameters(); - removeStringColumn.setString(1, rid); - removeStringColumn.setString(2, k); - if (removeStringColumn.executeUpdate() == 0) { - m = get(keySpace, columnFamily, key); - LOGGER.debug( - "Column Not present did not remove {} {} Current Column:{} ", - new Object[] { getRowId(keySpace, columnFamily, key), k, m }); - } else { - LOGGER.debug("Removed Index {} {} ", - getRowId(keySpace, columnFamily, key), k); - } - } - } - } - } - } catch (SQLException e) { - LOGGER.warn("Failed to perform insert/update operation on {}:{}:{} ", new Object[] { - keySpace, columnFamily, key }, e); - throw new StorageClientException(e.getMessage(), e); - } catch (IOException e) { - LOGGER.warn("Failed to perform insert/update operation on {}:{}:{} ", new Object[] { - keySpace, columnFamily, key }, e); - throw new StorageClientException(e.getMessage(), e); - } finally { - close(statementCache); - } - } - - private boolean shouldIndex(String keySpace, String columnFamily, String k) { - if (indexColumns == null) { - PreparedStatement pst = null; - ResultSet rs = null; - try { - pst = getStatement(keySpace, columnFamily, SELECT_INDEX_COLUMNS, "default", null); - inc(SELECT_INDEX_COLUMNS); - pst.clearWarnings(); - pst.clearParameters(); - rs = pst.executeQuery(); - inc("select-index-columns-rs"); - Set loadIndexColumns = Sets.newHashSet(); - while (rs.next()) { - loadIndexColumns.add(rs.getString(1)); - } - indexColumns = loadIndexColumns; - LOGGER.debug("Indexing Colums is {} ", indexColumns); - } catch (SQLException e) { - LOGGER.warn(e.getMessage(), e); - return false; - } finally { - close(rs, "select-index-columns-rs"); - close(pst, SELECT_INDEX_COLUMNS); - } - } - if (indexColumns.contains(columnFamily + ":" + k)) { - LOGGER.debug("Will Index {}:{}", columnFamily, k); - return true; - } else { - 
LOGGER.debug("Should Not Index {}:{}", columnFamily, k); - return false; - } - } - - private String getRowId(String keySpace, String columnFamily, String key) { - return keySpace + ":" + columnFamily + ":" + key; - } - - public void remove(String keySpace, String columnFamily, String key) - throws StorageClientException { - checkClosed(); - PreparedStatement deleteStringRow = null; - PreparedStatement deleteBlockRow = null; - String rid = rowHash(keySpace, columnFamily, key); - try { - deleteStringRow = getStatement(keySpace, columnFamily, SQL_DELETE_STRING_ROW, rid, null); - inc("deleteStringRow"); - deleteStringRow.clearWarnings(); - deleteStringRow.clearParameters(); - deleteStringRow.setString(1, rid); - deleteStringRow.executeUpdate(); - - deleteBlockRow = getStatement(keySpace, columnFamily, SQL_BLOCK_DELETE_ROW, rid, null); - inc("deleteBlockRow"); - deleteBlockRow.clearWarnings(); - deleteBlockRow.clearParameters(); - deleteBlockRow.setString(1, rid); - deleteBlockRow.executeUpdate(); - - } catch (SQLException e) { - LOGGER.warn("Failed to perform delete operation on {}:{}:{} ", new Object[] { keySpace, - columnFamily, key }, e); - throw new StorageClientException(e.getMessage(), e); - } finally { - close(deleteStringRow, "deleteStringRow"); - close(deleteBlockRow, "deleteBlockRow"); - } - } - - public void close() { - if (closed == null) { - try { - closed = new Exception("Connection Closed Traceback"); - shutdownConnection(); - jcbcStorageClientConnection.releaseClient(this); - } catch (Throwable t) { - LOGGER.error("Failed to close connection ", t); - } - } - } - - private void checkClosed() throws StorageClientException { - if (closed != null) { - throw new StorageClientException( - "Connection Has Been closed, traceback of close location follows ", closed); - } - } - - /** - * Get a prepared statement, potentially optimized and sharded. 
- * - * @param keySpace - * @param columnFamily - * @param sqlSelectStringRow - * @param rid - * @param statementCache - * @return - * @throws SQLException - */ - private PreparedStatement getStatement(String keySpace, String columnFamily, - String sqlSelectStringRow, String rid, Map statementCache) - throws SQLException { - String shard = rid.substring(0, 1); - String[] keys = new String[] { - sqlSelectStringRow + "." + keySpace + "." + columnFamily + "._" + shard, - sqlSelectStringRow + "." + columnFamily + "._" + shard, - sqlSelectStringRow + "." + keySpace + "._" + shard, - sqlSelectStringRow + "._" + shard, - sqlSelectStringRow + "." + keySpace + "." + columnFamily, - sqlSelectStringRow + "." + columnFamily, sqlSelectStringRow + "." + keySpace, - sqlSelectStringRow }; - for (String k : keys) { - if (sqlConfig.containsKey(k)) { - if (statementCache != null && statementCache.containsKey(k)) { - return statementCache.get(k); - } else { - PreparedStatement pst = jcbcStorageClientConnection.getConnection() - .prepareStatement((String) sqlConfig.get(k)); - if (statementCache != null) { - inc("cachedStatement"); - statementCache.put(k, pst); - } - return pst; - } - } - } - return null; - } - - public void shutdownConnection() { - if (active) { - disposeDisposables(); - active = false; - } - } - - private void disposeDisposables() { - passivate = new Exception("Passivate Traceback"); - for (Disposable d : toDispose) { - d.close(); - } - } - - private T registerDisposable(T disposable) { - toDispose.add(disposable); - return disposable; - } - - public boolean validate() throws StorageClientException { - checkClosed(); - Statement statement = null; - try { - statement = jcbcStorageClientConnection.getConnection().createStatement(); - inc("vaidate"); - - statement.execute(getSql(SQL_VALIDATE)); - return true; - } catch (SQLException e) { - LOGGER.warn("Failed to validate connection ", e); - return false; - } finally { - try { - statement.close(); - dec("vaidate"); - } 
catch (Throwable e) { - LOGGER.debug("Failed to close statement in validate ", e); - } - } - } - - private String getSql(String statementName) { - return (String) sqlConfig.get(statementName); - } - - public void checkSchema(String[] clientConfigLocations) throws ClientPoolException, - StorageClientException { - checkClosed(); - Statement statement = null; - try { - - statement = jcbcStorageClientConnection.getConnection().createStatement(); - try { - statement.execute(getSql(SQL_CHECKSCHEMA)); - inc("schema"); - LOGGER.info("Schema Exists"); - return; - } catch (SQLException e) { - LOGGER.info("Schema does not exist {}", e.getMessage()); - } - - for (String clientSQLLocation : clientConfigLocations) { - String clientDDL = clientSQLLocation + ".ddl"; - InputStream in = this.getClass().getClassLoader().getResourceAsStream(clientDDL); - if (in != null) { - try { - BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF8")); - int lineNo = 1; - String line = br.readLine(); - StringBuilder sqlStatement = new StringBuilder(); - while (line != null) { - line = StringUtils.stripEnd(line, null); - if (!line.isEmpty()) { - if (line.startsWith(SQL_COMMENT)) { - LOGGER.info("Comment {} ", line); - } else if (line.endsWith(SQL_EOL)) { - sqlStatement.append(line.substring(0, line.length() - 1)); - String ddl = sqlStatement.toString(); - try { - statement.executeUpdate(ddl); - LOGGER.info("SQL OK {}:{} {} ", new Object[] { - clientDDL, lineNo, ddl }); - } catch (SQLException e) { - LOGGER.warn("SQL ERROR {}:{} {} {} ", new Object[] { - clientDDL, lineNo, ddl, e.getMessage() }); - } - sqlStatement = new StringBuilder(); - } else { - sqlStatement.append(line); - } - } - line = br.readLine(); - lineNo++; - } - br.close(); - LOGGER.info("Schema Created from {} ", clientDDL); - - break; - } catch (Throwable e) { - LOGGER.error("Failed to load Schema from {}", clientDDL, e); - } finally { - try { - in.close(); - } catch (IOException e) { - LOGGER.error("Failed to close 
stream from {}", clientDDL, e); - } - - } - } else { - LOGGER.info("No Schema found at {} ", clientDDL); - } - - } - - } catch (SQLException e) { - LOGGER.info("Failed to create schema ", e); - throw new ClientPoolException("Failed to create schema ", e); - } finally { - try { - statement.close(); - dec("schema"); - } catch (Throwable e) { - LOGGER.debug("Failed to close statement in validate ", e); - } - } - } - - public void activate() { - passivate = null; - } - - public void passivate() { - disposeDisposables(); - } - - public Map streamBodyIn(String keySpace, String columnFamily, String contentId, - String contentBlockId, String streamId, Map content, InputStream in) - throws StorageClientException, AccessDeniedException, IOException { - checkClosed(); - return streamedContentHelper.writeBody(keySpace, columnFamily, contentId, contentBlockId, - streamId, content, in); - } - - public InputStream streamBodyOut(String keySpace, String columnFamily, String contentId, - String contentBlockId, String streamId, Map content) - throws StorageClientException, AccessDeniedException, IOException { - checkClosed(); - final InputStream in = streamedContentHelper.readBody(keySpace, columnFamily, - contentBlockId, streamId, content); - if ( in != null ) { - registerDisposable(new Disposable() { - - private boolean open = true; - - public void close() { - if (open && in != null) { - try { - in.close(); - } catch (IOException e) { - LOGGER.warn(e.getMessage(), e); - } - open = false; - } - - } - }); - } - return in; - } - - protected Connection getConnection() throws StorageClientException, SQLException { - checkClosed(); - return jcbcStorageClientConnection.getConnection(); - } - - public DisposableIterator> find(String keySpace, String columnFamily, - Map properties) throws StorageClientException { - checkClosed(); - - String[] keys = new String[] { "block-find." + keySpace + "." + columnFamily, - "block-find." 
+ columnFamily, "block-find" }; - - String sql = null; - for (String statementKey : keys) { - sql = getSql(statementKey); - if (sql != null) { - break; - } - } - if (sql == null) { - throw new StorageClientException("Failed to locate SQL statement for any of " - + Arrays.toString(keys)); - } - - String[] statementParts = StringUtils.split(sql, ';'); - - StringBuilder tables = new StringBuilder(); - StringBuilder where = new StringBuilder(); - List parameters = Lists.newArrayList(); - int set = 0; - for (Entry e : properties.entrySet()) { - Object v = e.getValue(); - String k = e.getKey(); - if ( shouldIndex(keySpace, columnFamily, k) ) { - if (v != null) { - String t = "a" + set; - tables.append(MessageFormat.format(statementParts[1], t)); - where.append(MessageFormat.format(statementParts[2], t)); - parameters.add(k); - parameters.add(v); - set++; - } - } else { - LOGGER.warn("Search on {}:{} is not supported, filter dropped ",columnFamily,k); - } - } - - final String sqlStatement = MessageFormat.format(statementParts[0], tables.toString(), - where.toString()); - - PreparedStatement tpst = null; - ResultSet trs = null; - try { - LOGGER.debug("Preparing {} ", sqlStatement); - tpst = jcbcStorageClientConnection.getConnection().prepareStatement(sqlStatement); - inc("iterator"); - tpst.clearParameters(); - int i = 1; - for (Object params : parameters) { - tpst.setObject(i, params); - LOGGER.debug("Setting {} ", params); - - i++; - } - - trs = tpst.executeQuery(); - inc("iterator r"); - LOGGER.debug("Executed "); - - // pass control to the iterator. 
- final PreparedStatement pst = tpst; - final ResultSet rs = trs; - tpst = null; - trs = null; - return registerDisposable(new PreemptiveIterator>() { - - private Map map = Maps.newHashMap(); - private boolean open = true; - - protected Map internalNext() { - return map; - } - - protected boolean internalHasNext() { - try { - if (open && rs.next()) { - map.clear(); - Types.loadFromStream(rs.getString(1), map, - rs.getBinaryStream(2)); - return true; - } - LOGGER.debug("No More Records "); - close(); - map = null; - return false; - } catch (SQLException e) { - LOGGER.error(e.getMessage(), e); - close(); - map = null; - return false; - } catch (IOException e) { - LOGGER.error(e.getMessage(), e); - close(); - map = null; - return false; - } - } - - public void close() { - if (open) { - open = false; - try { - if (rs != null) { - rs.close(); - dec("iterator r"); - } - } catch (SQLException e) { - LOGGER.warn(e.getMessage(), e); - } - try { - if (pst != null) { - pst.close(); - dec("iterator"); - } - } catch (SQLException e) { - LOGGER.warn(e.getMessage(), e); - } - } - - } - }); - } catch (SQLException e) { - LOGGER.error(e.getMessage(), e); - throw new StorageClientException(e.getMessage() + " SQL Statement was " + sqlStatement, - e); - } finally { - // trs and tpst will only be non null if control has not been passed - // to the iterator. 
- try { - if (trs != null) { - trs.close(); - dec("iterator r"); - } - } catch (SQLException e) { - LOGGER.warn(e.getMessage(), e); - } - try { - if (tpst != null) { - tpst.close(); - dec("iterator"); - } - } catch (SQLException e) { - LOGGER.warn(e.getMessage(), e); - } - } - - } - - private void dec(String key) { - AtomicInteger cn = counters.get(key); - if (cn == null) { - LOGGER.warn("Never Statement/ResultSet Created Counter {} ", key); - } else { - cn.decrementAndGet(); - } - } - - private void inc(String key) { - AtomicInteger cn = counters.get(key); - if (cn == null) { - cn = new AtomicInteger(); - counters.put(key, cn); - } - int c = cn.incrementAndGet(); - if (c > 10) { - LOGGER.warn( - "Counter {} Leaking {}, please investigate. This will eventually cause an OOM Error. ", - key, c); - } - } - - private void close(ResultSet rs, String name) { - try { - if (rs != null) { - rs.close(); - dec(name); - } - } catch (Throwable e) { - LOGGER.debug("Failed to close result set, ok to ignore this message ", e); - } - } - - private void close(PreparedStatement pst, String name) { - try { - if (pst != null) { - pst.close(); - dec(name); - } - } catch (Throwable e) { - LOGGER.debug("Failed to close prepared set, ok to ignore this message ", e); - } - } - - private void close(Map statementCache) { - for (PreparedStatement pst : statementCache.values()) { - if (pst != null) { - try { - pst.close(); - dec("cachedStatement"); - } catch (SQLException e) { - LOGGER.debug(e.getMessage(), e); - } - } - } - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClient.java b/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClient.java deleted file mode 100644 index cebc9f18..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/storage/mem/MemoryStorageClient.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -package org.sakaiproject.nakamura.lite.storage.mem; - -import org.sakaiproject.nakamura.api.lite.RemoveProperty; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.StorageClientUtils; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.content.Content; -import org.sakaiproject.nakamura.lite.content.BlockContentHelper; -import org.sakaiproject.nakamura.lite.content.BlockSetContentHelper; -import org.sakaiproject.nakamura.lite.storage.DisposableIterator; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.ConcurrentHashMap; - -public class MemoryStorageClient implements StorageClient { - - private static final Logger LOGGER = LoggerFactory.getLogger(MemoryStorageClient.class); - Map> store; - private int blockSize; - private int maxChunksPerBlockSet; - private BlockContentHelper contentHelper; - private MemoryStorageClientPool pool; - - public MemoryStorageClient(MemoryStorageClientPool pool, - Map> store, Map properties) { - this.store = 
store; - this.pool = pool; - contentHelper = new BlockSetContentHelper(this); - blockSize = StorageClientUtils.getSetting( - properties.get(BlockSetContentHelper.CONFIG_BLOCK_SIZE), - BlockSetContentHelper.DEFAULT_BLOCK_SIZE); - maxChunksPerBlockSet = StorageClientUtils.getSetting( - properties.get(BlockSetContentHelper.CONFIG_MAX_CHUNKS_PER_BLOCK), - BlockSetContentHelper.DEFAULT_MAX_CHUNKS_PER_BLOCK); - - } - - public void close() { - pool.releaseClient(this); - } - - public void destroy() { - } - - public Map get(String keySpace, String columnFamily, String key) - throws StorageClientException { - return (Map) getOrCreateRow(keySpace, columnFamily, key); - } - - private Map getOrCreateRow(String keySpace, String columnFamily, String key) { - String keyName = getKey(keySpace, columnFamily, key); - - if (!store.containsKey(keyName)) { - Map row = new ConcurrentHashMap(); - store.put(keyName, row); - LOGGER.debug("Created {} as {} ", new Object[] { keyName, row }); - return row; - } - Map row = store.get(keyName); - LOGGER.debug("Got {} as {} ", new Object[] { keyName, row }); - return row; - } - - private String getKey(String keySpace, String columnFamily, String key) { - return keySpace + ":" + columnFamily + ":" + key; - } - - public void insert(String keySpace, String columnFamily, String key, Map values, boolean probablyNew) - throws StorageClientException { - Map row = get(keySpace, columnFamily, key); - - for (Entry e : values.entrySet()) { - Object value = e.getValue(); - if (value instanceof byte[]) { - byte[] bvalue = (byte[]) e.getValue(); - byte[] nvalue = new byte[bvalue.length]; - System.arraycopy(bvalue, 0, nvalue, 0, bvalue.length); - value = nvalue; - } - if (value == null || value instanceof RemoveProperty) { - row.remove(e.getKey()); - } else { - row.put(e.getKey(), value); - } - } - LOGGER.debug("Updated {} {} ", key, row); - } - - public void remove(String keySpace, String columnFamily, String key) - throws StorageClientException { - String 
keyName = getKey(keySpace, columnFamily, key); - if (store.containsKey(keyName)) { - store.remove(keyName); - } - } - - public Map streamBodyIn(String keySpace, String contentColumnFamily, - String contentId, String contentBlockId, String streamId, Map content, InputStream in) - throws StorageClientException, AccessDeniedException, IOException { - return contentHelper.writeBody(keySpace, contentColumnFamily, contentId, contentBlockId, streamId, - blockSize, maxChunksPerBlockSet, in); - } - - public InputStream streamBodyOut(String keySpace, String contentColumnFamily, String contentId, - String contentBlockId, String streamId, Map content) throws StorageClientException, - AccessDeniedException { - - int nBlocks = toInt(content.get(Content.NBLOCKS_FIELD)); - return contentHelper.readBody(keySpace, contentColumnFamily, contentBlockId, streamId, nBlocks); - } - - private int toInt(Object object) { - if ( object instanceof Integer) { - return ((Integer) object).intValue(); - } - return 0; - } - - public DisposableIterator> find(String keySpace, - String authorizableColumnFamily, Map properties) { - // TODO: Implement - throw new UnsupportedOperationException(); - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/BooleanArrayType.java b/src/main/java/org/sakaiproject/nakamura/lite/types/BooleanArrayType.java deleted file mode 100644 index f0854c58..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/BooleanArrayType.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.sakaiproject.nakamura.lite.types; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -public class BooleanArrayType implements Type { - - public int getTypeId() { - return 1005; - } - - public void save(DataOutputStream dos, Object object) throws IOException { - boolean[] values = (boolean[]) object; - dos.writeInt(values.length); - for ( boolean s : values) { - dos.writeBoolean(s); - } - } - - public boolean[] 
load(DataInputStream in) throws IOException { - int l = in.readInt(); - boolean[] values = new boolean[l]; - for ( int i = 0; i < l; i++ ) { - values[i] = in.readBoolean(); - } - return values; - } - - public Class getTypeClass() { - return boolean[].class; - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/DoubleArrayType.java b/src/main/java/org/sakaiproject/nakamura/lite/types/DoubleArrayType.java deleted file mode 100644 index a4a6d8dd..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/DoubleArrayType.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.sakaiproject.nakamura.lite.types; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -public class DoubleArrayType implements Type { - - public int getTypeId() { - return 1003; - } - - public void save(DataOutputStream dos, Object object) throws IOException { - double[] values = (double[]) object; - dos.writeInt(values.length); - for ( double s : values) { - dos.writeDouble(s); - } - } - - public double[] load(DataInputStream in) throws IOException { - int l = in.readInt(); - double[] values = new double[l]; - for ( int i = 0; i < l; i++ ) { - values[i] = in.readDouble(); - } - return values; - } - - public Class getTypeClass() { - return double[].class; - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/IntegerArrayType.java b/src/main/java/org/sakaiproject/nakamura/lite/types/IntegerArrayType.java deleted file mode 100644 index ff7201c1..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/IntegerArrayType.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.sakaiproject.nakamura.lite.types; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -public class IntegerArrayType implements Type { - - public int getTypeId() { - return 1002; - } - - public void save(DataOutputStream dos, Object object) throws IOException { - int[] values = (int[]) object; - 
dos.writeInt(values.length); - for ( int s : values) { - dos.writeInt(s); - } - } - - public int[] load(DataInputStream in) throws IOException { - int l = in.readInt(); - int[] values = new int[l]; - for ( int i = 0; i < l; i++ ) { - values[i] = in.readInt(); - } - return values; - } - - public Class getTypeClass() { - return int[].class; - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/LongArrayType.java b/src/main/java/org/sakaiproject/nakamura/lite/types/LongArrayType.java deleted file mode 100644 index 84e9510c..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/LongArrayType.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.sakaiproject.nakamura.lite.types; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -public class LongArrayType implements Type { - - public int getTypeId() { - return 1001; - } - - public void save(DataOutputStream dos, Object object ) throws IOException { - long[] values = (long[]) object; - dos.writeInt(values.length); - for ( long s : values) { - dos.writeLong(s); - } - } - - public long[] load(DataInputStream in) throws IOException { - int l = in.readInt(); - long[] values = new long[l]; - for ( int i = 0; i < l; i++ ) { - values[i] = in.readLong(); - } - return values; - } - - public Class getTypeClass() { - return long[].class; - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/StringType.java b/src/main/java/org/sakaiproject/nakamura/lite/types/StringType.java deleted file mode 100644 index 8506ce9b..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/StringType.java +++ /dev/null @@ -1,25 +0,0 @@ -package org.sakaiproject.nakamura.lite.types; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -public class StringType implements Type { - - public int getTypeId() { - return 0; - } - - public void save(DataOutputStream dos, Object object) throws IOException { - 
dos.writeUTF((String) object); - } - - public String load(DataInputStream in) throws IOException { - return in.readUTF(); - } - - public Class getTypeClass() { - return String.class; - } - -} diff --git a/src/main/java/org/sakaiproject/nakamura/lite/types/Type.java b/src/main/java/org/sakaiproject/nakamura/lite/types/Type.java deleted file mode 100644 index c9730162..00000000 --- a/src/main/java/org/sakaiproject/nakamura/lite/types/Type.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.sakaiproject.nakamura.lite.types; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -public interface Type { - - int getTypeId(); - - void save(DataOutputStream dos, Object o) throws IOException; - - T load(DataInputStream in) throws IOException; - - Class getTypeClass(); - -} diff --git a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.ddl b/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.ddl deleted file mode 100644 index 6778a3cf..00000000 --- a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.ApacheDerby.ddl +++ /dev/null @@ -1,30 +0,0 @@ - -CREATE TABLE css ( - rid varchar(32) NOT NULL, - cid varchar(64) NOT NULL, - v varchar(780) NOT NULL, - primary key(rid,cid)); - - - -CREATE TABLE csb ( - rid varchar(32) NOT NULL, - cid varchar(64) NOT NULL, - v blob, - primary key(rid,cid)); - -CREATE INDEX css_locate_idx ON css (v, cid); - - -# Central Store for Object bodies, serialized content maps rather than columns -CREATE TABLE css_b ( - rid varchar(32) NOT NULL, - b blob, - primary key(rid)); - - -# Columns that need to be indexed -CREATE TABLE index_cols (cid varchar(64) NOT NULL); - -insert into index_cols (cid) values ('au:rep:principalName'); -insert into index_cols (cid) values ('au:type'); diff --git a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.1.ddl 
b/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.1.ddl deleted file mode 100644 index df925c80..00000000 --- a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.5.1.ddl +++ /dev/null @@ -1,101 +0,0 @@ -# If using mySQL 5.1 you can use innodb_autoinc_lock_mode=1 and have an an autoinc PK. -# Having and autoink PK in 5.0 and earlier will lead to table serialization as the key generation requires a full table lock which is why we have no -# PK in these tables -# The access mechanism must be update then insert to allow no PK and no Unique key. -# Please read http://harrison-fisk.blogspot.com/2009/02/my-favorite-new-feature-of-mysql-51.html for info. - -DROP TABLE IF EXISTS `css`; - -CREATE TABLE `css` ( - `id` int(11) NOT NULL auto_increment, - `rid` varchar(32) NOT NULL, - `cid` varchar(64) NOT NULL, - `v` varchar(780) NOT NULL, - PRIMARY KEY (`id`), - KEY `rowkey` USING BTREE (`rid`,`cid`), - KEY `cid_locate_i` (`v`(255),`cid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - -DROP TABLE IF EXISTS `au_css`; - -CREATE TABLE `au_css` ( - `id` int(11) NOT NULL auto_increment, - `rid` varchar(32) NOT NULL, - `cid` varchar(64) NOT NULL, - `v` varchar(780) NOT NULL, - PRIMARY KEY (`id`), - KEY `rowkey` USING BTREE (`rid`,`cid`), - KEY `cid_locate_i` (`v`(255),`cid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `cn_css`; - -CREATE TABLE `cn_css` ( - `id` int(11) NOT NULL auto_increment, - `rid` varchar(32) NOT NULL, - `cid` varchar(64) NOT NULL, - `v` varchar(780) NOT NULL, - PRIMARY KEY (`id`), - KEY `rowkey` USING BTREE (`rid`,`cid`), - KEY `cid_locate_i` (`v`(255),`cid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - -DROP TABLE IF EXISTS `ac_css`; - -CREATE TABLE `ac_css` ( - `id` int(11) NOT NULL auto_increment, - `rid` varchar(32) NOT NULL, - `cid` varchar(64) NOT NULL, - `v` varchar(780) NOT NULL, - PRIMARY KEY (`id`), - KEY `rowkey` USING BTREE (`rid`,`cid`), - KEY `cid_locate_i` 
(`v`(255),`cid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - - -# Body Store. In some cases we want to store the bodies of the objects in a binary serialized lump -# This allows us to load and save the sparse map without using multiple records in the above tables and hence is more compact -# And uses less bandwidth to the DB. -# Where this is done, we still index certain fields as defined in index_cols - -CREATE TABLE `css_b` ( - `rid` varchar(32) NOT NULL, - `b` blob, - PRIMARY KEY (`rid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -# Central Store for Object bodies, serialized content maps rather than columns -CREATE TABLE `cn_css_b` ( - `rid` varchar(32) NOT NULL, - `b` blob, - PRIMARY KEY (`rid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -# Central Store for Object bodies, serialized content maps rather than columns -CREATE TABLE `au_css_b` ( - `rid` varchar(32) NOT NULL, - `b` blob, - PRIMARY KEY (`rid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -# Central Store for Object bodies, serialized content maps rather than columns -CREATE TABLE `ac_css_b` ( - `rid` varchar(32) NOT NULL, - `b` blob, - PRIMARY KEY (`rid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - - -# This table stores the colunms that are indexed - -CREATE TABLE `index_cols` ( - `cid` varchar(64) NOT NULL, - PRIMARY KEY (`cid`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -insert into index_cols (cid) values ('au:firstName'); - - diff --git a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.ddl b/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.ddl deleted file mode 100644 index 83cad5a0..00000000 --- a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.MySQL.ddl +++ /dev/null @@ -1,97 +0,0 @@ -# If using mySQL 5.1 you can use innodb_autoinc_lock_mode=1 and have an an autoinc PK. 
-# Having and autoink PK in 5.0 and earlier will lead to table serialization as the key generation requires a full table lock which is why we have no -# PK in these tables -# The access mechanism must be update then insert to allow no PK and no Unique key. -# Please read http://harrison-fisk.blogspot.com/2009/02/my-favorite-new-feature-of-mysql-51.html for info. - -DROP TABLE IF EXISTS `css`; - -# Central store - -CREATE TABLE `css` ( - `rid` varchar(32) NOT NULL, - `cid` varchar(64) NOT NULL, - `v` varchar(780) NOT NULL, - primary key USING HASH (`rid`,`cid`), - KEY `cid_locate_i` (`v`(255),`cid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - - -DROP TABLE IF EXISTS `au_css`; - -# Store just for Authorizables -CREATE TABLE `au_css` ( - `rid` varchar(32) NOT NULL, - `cid` varchar(64) NOT NULL, - `v` varchar(780) NOT NULL, - primary key USING HASH (`rid`,`cid`), - KEY `cid_locate_i` (`v`(255),`cid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `cn_css`; - -# Store just for Content -CREATE TABLE `cn_css` ( - `rid` varchar(32) NOT NULL, - `cid` varchar(64) NOT NULL, - `v` varchar(780) NOT NULL, - primary key USING HASH (`rid`,`cid`), - KEY `cid_locate_i` (`v`(255),`cid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - - -DROP TABLE IF EXISTS `ac_css`; - -# Store just for Access Control -CREATE TABLE `ac_css` ( - `rid` varchar(32) NOT NULL, - `cid` varchar(64) NOT NULL, - `v` varchar(780) NOT NULL, - primary key USING HASH (`rid`,`cid`), - KEY `cid_locate_i` (`v`(255),`cid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - - -# Body Store. In some cases we want to store the bodies of the objects in a binary serialized lump -# This allows us to load and save the sparse map without using multiple records in the above tables and hence is more compact -# And uses less bandwidth to the DB. 
-# Where this is done, we still index certain fields as defined in index_cols - -CREATE TABLE `css_b` ( - `rid` varchar(32) NOT NULL, - `b` blob, - primary key USING HASH (`rid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - -# Central Store for Object bodies, serialized content maps rather than columns -CREATE TABLE `cn_css_b` ( - `rid` varchar(32) NOT NULL, - `b` blob, - primary key USING HASH (`rid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - -# Central Store for Object bodies, serialized content maps rather than columns -CREATE TABLE `au_css_b` ( - `rid` varchar(32) NOT NULL, - `b` blob, - primary key USING HASH (`rid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - -# Central Store for Object bodies, serialized content maps rather than columns -CREATE TABLE `ac_css_b` ( - `rid` varchar(32) NOT NULL, - `b` blob, - primary key USING HASH (`rid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - - -# Columns that need to be indexed -CREATE TABLE `index_cols` ( - `cid` varchar(64) NOT NULL, - PRIMARY KEY (`cid`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; - -insert into index_cols (cid) values ('au:rep:principalName'); -insert into index_cols (cid) values ('au:type'); - - diff --git a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.sql b/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.sql deleted file mode 100644 index 9eea8edd..00000000 --- a/src/main/resources/org/sakaiproject/nakamura/lite/storage/jdbc/config/client.sql +++ /dev/null @@ -1,21 +0,0 @@ -delete-string-row = delete from css where rid = ? -select-string-row = select cid, v from css where rid = ? -insert-string-column = insert into css ( v, rid, cid) values ( ?, ?, ? ) -update-string-column = update css set v = ? where rid = ? and cid = ? -remove-string-column = delete from css where rid = ? and cid = ? -check-schema = select count(*) from css -find = select a.rid, a.cid, a.v from css a {0} where {1} 1 = 1 ;, css {0} ; {0}.cid = ? and {0}.v = ? 
and {0}.rid = a.rid and -validate = values(1) -rowid-hash = SHA1 - - -select-index-columns = select cid from index_cols - -block-select-row = select b from css_b where rid = ? -block-delete-row = delete from css_b where rid = ? -block-insert-row = insert into css_b (rid,b) values (?, ?) -block-update-row = update css_b set b = ? where rid = ? - -block-find = select a.rid, a.b from css_b a {0} where {1} 1 = 1;, css {0} ; {0}.cid = ? and {0}.v = ? and {0}.rid = a.rid and - -use-batch-inserts = 0 diff --git a/src/test/java/org/sakaiproject/nakamura/lite/OSGiStoreListenerTest.java b/src/test/java/org/sakaiproject/nakamura/lite/OSGiStoreListenerTest.java deleted file mode 100644 index fe4ce83a..00000000 --- a/src/test/java/org/sakaiproject/nakamura/lite/OSGiStoreListenerTest.java +++ /dev/null @@ -1,39 +0,0 @@ -package org.sakaiproject.nakamura.lite; - -import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; -import org.osgi.service.event.EventAdmin; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; - -public class OSGiStoreListenerTest { - - @Mock - private EventAdmin eventAdmin; - - public OSGiStoreListenerTest() { - MockitoAnnotations.initMocks(this); - } - - @Test - public void test() { - OSGiStoreListener l = new OSGiStoreListener(); - l.eventAdmin = eventAdmin; - for (String zone : new String[] { Security.ADMIN_AUTHORIZABLES, Security.ADMIN_GROUPS, - Security.ADMIN_USERS, Security.ZONE_ADMIN, Security.ZONE_AUTHORIZABLES, - Security.ZONE_CONTENT }) { - l.onDelete(zone, "path", "user"); - l.onDelete(zone, "path", "user", (String[]) null); - l.onDelete(zone, "path", "user", "xx"); - l.onDelete(zone, "path", "user", "x:x"); - l.onDelete(zone, null, "user", "x:x", "x:x"); - l.onUpdate(zone, "path", "user", true); - l.onUpdate(zone, "path", "user", false, (String[]) null); - l.onUpdate(zone, "path", "user", true, "xx"); - l.onUpdate(zone, "path", "user", false, "x:x"); - l.onUpdate(zone, null, "user", true, "x:x", 
"x:x"); - } - l.onLogin("userId", "sessionId"); - l.onLogout("userId", "sessoionID"); - } -} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/accesscontrol/AbstractAccessControlManagerImplTest.java b/src/test/java/org/sakaiproject/nakamura/lite/accesscontrol/AbstractAccessControlManagerImplTest.java deleted file mode 100644 index 96f44512..00000000 --- a/src/test/java/org/sakaiproject/nakamura/lite/accesscontrol/AbstractAccessControlManagerImplTest.java +++ /dev/null @@ -1,309 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.lite.accesscontrol; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.sakaiproject.nakamura.api.lite.ClientPoolException; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AclModification; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Permission; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Permissions; -import org.sakaiproject.nakamura.api.lite.accesscontrol.Security; -import org.sakaiproject.nakamura.api.lite.authorizable.Group; -import org.sakaiproject.nakamura.api.lite.authorizable.User; -import org.sakaiproject.nakamura.lite.ConfigurationImpl; -import org.sakaiproject.nakamura.lite.LoggingStorageListener; -import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; -import org.sakaiproject.nakamura.lite.authorizable.AuthorizableManagerImpl; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -public abstract class AbstractAccessControlManagerImplTest { - private static final Logger LOGGER = LoggerFactory - .getLogger(AbstractAccessControlManagerImplTest.class); - private StorageClient client; - private ConfigurationImpl configuration; - private StorageClientPool clientPool; - - @Before - public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, - ClassNotFoundException { - clientPool = getClientPool(); - client = 
clientPool.getClient(); - configuration = new ConfigurationImpl(); - Map properties = Maps.newHashMap(); - properties.put("keyspace", "n"); - properties.put("acl-column-family", "ac"); - properties.put("authorizable-column-family", "au"); - configuration.activate(properties); - AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, - configuration); - authorizableActivator.setup(); - LOGGER.info("Setup Complete"); - } - - protected abstract StorageClientPool getClientPool() throws ClassNotFoundException; - - @After - public void after() throws ClientPoolException { - client.close(); - } - - @Test - public void test() throws StorageClientException, AccessDeniedException { - AuthenticatorImpl authenticator = new AuthenticatorImpl(client, configuration); - User currentUser = authenticator.authenticate("admin", "admin"); - String u1 = "user1-"+System.currentTimeMillis(); - String u2 = "user2-"+System.currentTimeMillis(); - String u3 = "user3-"+System.currentTimeMillis(); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, - currentUser, configuration, null, new LoggingStorageListener()); - AclModification user1 = new AclModification(u1, Permissions.CAN_ANYTHING.combine( - Permissions.CAN_ANYTHING_ACL).getPermission(), AclModification.Operation.OP_REPLACE); - AclModification user2 = new AclModification(u2, Permissions.CAN_READ - .combine(Permissions.CAN_WRITE).combine(Permissions.CAN_DELETE).getPermission(), - AclModification.Operation.OP_REPLACE); - AclModification user3 = new AclModification(u3, Permissions.CAN_READ.getPermission(), - AclModification.Operation.OP_REPLACE); - String basepath = "testpath"+System.currentTimeMillis(); - - accessControlManagerImpl.setAcl(Security.ZONE_AUTHORIZABLES, basepath, - new AclModification[] { user1, user2, user3 }); - - Map acl = accessControlManagerImpl.getAcl(Security.ZONE_AUTHORIZABLES, - basepath); - 
Assert.assertEquals(Integer.toHexString(Permissions.CAN_ANYTHING.combine( - Permissions.CAN_ANYTHING_ACL).getPermission()), Integer - .toHexString((Integer) acl.get(u1))); - Assert.assertEquals( - Permissions.CAN_READ.combine(Permissions.CAN_WRITE).combine(Permissions.CAN_DELETE) - .getPermission(), ((Integer)acl.get(u2)).intValue()); - Assert.assertEquals(Permissions.CAN_READ.getPermission(), - ((Integer)acl.get(u3)).intValue()); - for (Entry e : acl.entrySet()) { - LOGGER.info(" ACE {} : {} ", e.getKey(), e.getValue()); - } - LOGGER.info("Got ACL {}", acl); - - } - - @Test - public void testPrivileges() throws StorageClientException, AccessDeniedException { - AuthenticatorImpl authenticator = new AuthenticatorImpl(client, configuration); - User currentUser = authenticator.authenticate("admin", "admin"); - String u1 = "user1-"+System.currentTimeMillis(); - String u2 = "user2-"+System.currentTimeMillis(); - String u3 = "user3-"+System.currentTimeMillis(); - String basepath = "testpath"+System.currentTimeMillis(); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, - currentUser, configuration, null, new LoggingStorageListener()); - AclModification user1CanAnything = new AclModification(AclModification.grantKey(u1), - Permissions.CAN_ANYTHING.combine(Permissions.CAN_ANYTHING_ACL).getPermission(), - AclModification.Operation.OP_REPLACE); - AclModification user2CantReadWrite = new AclModification(AclModification.denyKey(u2), - Permissions.CAN_READ.combine(Permissions.CAN_WRITE).combine(Permissions.CAN_DELETE) - .getPermission(), AclModification.Operation.OP_REPLACE); - AclModification user3cantRead = new AclModification(AclModification.denyKey(u3), - Permissions.CAN_READ.getPermission(), AclModification.Operation.OP_REPLACE); - - AclModification denyAnon = new AclModification(AclModification.denyKey(User.ANON_USER), - Permissions.ALL.getPermission(), AclModification.Operation.OP_REPLACE); - AclModification denyEveryone = new 
AclModification(AclModification.denyKey(Group.EVERYONE), - Permissions.ALL.getPermission(), AclModification.Operation.OP_REPLACE); - - AclModification user2CanReadWrite = new AclModification(AclModification.grantKey(u2), - Permissions.CAN_READ.combine(Permissions.CAN_WRITE).combine(Permissions.CAN_DELETE) - .getPermission(), AclModification.Operation.OP_REPLACE); - AclModification user3canRead = new AclModification(AclModification.grantKey(u3), - Permissions.CAN_READ.getPermission(), AclModification.Operation.OP_REPLACE); - - accessControlManagerImpl.setAcl(Security.ZONE_CONTENT, basepath+"/a/b/c", - new AclModification[] { user1CanAnything, user2CantReadWrite, user3cantRead, - denyAnon, denyEveryone }); - accessControlManagerImpl.setAcl(Security.ZONE_CONTENT, basepath+"/a/b", - new AclModification[] { user1CanAnything, user2CanReadWrite }); - accessControlManagerImpl.setAcl(Security.ZONE_CONTENT, basepath+"/a", new AclModification[] { - user1CanAnything, user3canRead }); - - Map acl = accessControlManagerImpl - .getAcl(Security.ZONE_CONTENT, basepath); - Assert.assertArrayEquals(new String[] {}, acl.keySet().toArray()); - - acl = accessControlManagerImpl.getAcl(Security.ZONE_CONTENT, basepath+"/a"); - Assert.assertArrayEquals(Arrays.toString(sortToArray(acl.keySet())), - new String[] { AclModification.grantKey(u1), AclModification.grantKey(u3) }, - sortToArray(acl.keySet())); - acl = accessControlManagerImpl.getAcl(Security.ZONE_CONTENT, basepath+"/a/b"); - Assert.assertArrayEquals( - new String[] { AclModification.grantKey(u1), AclModification.grantKey(u2) }, - sortToArray(acl.keySet())); - acl = accessControlManagerImpl.getAcl(Security.ZONE_CONTENT, basepath+"/a/b/c"); - Assert.assertArrayEquals(new String[] { AclModification.denyKey(User.ANON_USER), - AclModification.denyKey(Group.EVERYONE), AclModification.grantKey(u1), - AclModification.denyKey(u2), AclModification.denyKey(u3) }, - sortToArray(acl.keySet())); - - AuthorizableManagerImpl authorizableManager = 
new AuthorizableManagerImpl(currentUser, client, - configuration, accessControlManagerImpl, null, new LoggingStorageListener()); - authorizableManager.createUser(u1, "User 1", "test", - ImmutableMap.of("test", (Object)"test")); - authorizableManager.createUser(u2, "User 2", "test", - ImmutableMap.of("test", (Object)"test")); - authorizableManager.createUser(u3, "User 3", "test", - ImmutableMap.of("test", (Object)"test")); - - User user1 = (User) authorizableManager.findAuthorizable(u1); - User user2 = (User) authorizableManager.findAuthorizable(u2); - User user3 = (User) authorizableManager.findAuthorizable(u3); - User adminUser = (User) authorizableManager.findAuthorizable(User.ADMIN_USER); - User anonUser = (User) authorizableManager.findAuthorizable(User.ANON_USER); - Group everyoneGroup = (Group) authorizableManager.findAuthorizable(Group.EVERYONE); - - - - Assert.assertNotNull(user1); - Assert.assertNotNull(user2); - Assert.assertNotNull(user3); - Assert.assertNotNull(adminUser); - Assert.assertNotNull(anonUser); - Assert.assertNotNull(everyoneGroup); - - Assert.assertTrue(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath, Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath, Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(adminUser, Security.ZONE_CONTENT, basepath, Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath, Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath, Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath, Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath, Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath, Permissions.CAN_READ)); - 
Assert.assertFalse(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath, Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, basepath, Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, basepath, Permissions.ALL)); - - Assert.assertTrue(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath+"/a/b/c", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(adminUser, Security.ZONE_CONTENT, basepath+"/a/b/c", Permissions.ALL)); - Assert.assertFalse(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath+"/a/b/c", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath+"/a/b/c", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, basepath+"/a/b/c", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath+"/a/b/c", Permissions.CAN_READ)); - - - Assert.assertTrue(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(adminUser, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.CAN_WRITE.combine(Permissions.CAN_READ).combine(Permissions.CAN_DELETE))); - Assert.assertFalse(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.CAN_READ)); - 
Assert.assertFalse(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, basepath+"/a/b", Permissions.ALL)); - - Assert.assertTrue(accessControlManagerImpl.can(user1, Security.ZONE_CONTENT, basepath+"/a", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(adminUser, Security.ZONE_CONTENT, basepath+"/a", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath+"/a", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(user2, Security.ZONE_CONTENT, basepath+"/a", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath+"/a", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(user3, Security.ZONE_CONTENT, basepath+"/a", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath+"/a", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(anonUser, Security.ZONE_CONTENT, basepath+"/a", Permissions.ALL)); - Assert.assertTrue(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, basepath+"/a", Permissions.CAN_READ)); - Assert.assertFalse(accessControlManagerImpl.can(everyoneGroup, Security.ZONE_CONTENT, basepath+"/a", Permissions.ALL)); - - - String[] testpaths = { basepath, basepath+"/a", basepath+"/a/b", basepath+"/a/b/c", }; - - checkPermissions(user1, testpaths, new Permission[][] { - { Permissions.CAN_READ }, - { Permissions.CAN_READ, Permissions.CAN_WRITE, Permissions.CAN_DELETE, - Permissions.CAN_READ_ACL, Permissions.CAN_WRITE_ACL, - Permissions.CAN_DELETE_ACL, Permissions.CAN_MANAGE, Permissions.ALL }, - { Permissions.CAN_READ, Permissions.CAN_WRITE, 
Permissions.CAN_DELETE, - Permissions.CAN_READ_ACL, Permissions.CAN_WRITE_ACL, - Permissions.CAN_DELETE_ACL, Permissions.CAN_MANAGE, Permissions.ALL }, - { Permissions.CAN_READ, Permissions.CAN_WRITE, Permissions.CAN_DELETE, - Permissions.CAN_READ_ACL, Permissions.CAN_WRITE_ACL, - Permissions.CAN_DELETE_ACL, Permissions.CAN_MANAGE, Permissions.ALL } }, - new String[][] { - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE }, - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u3 }, - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u2, - u3 }, { User.ADMIN_USER, u1 } }, new String[][] { {}, {}, - {}, { User.ANON_USER, Group.EVERYONE, u2, u3 } }); - checkPermissions(user2, testpaths, new Permission[][] { { Permissions.CAN_READ }, - { Permissions.CAN_READ }, - { Permissions.CAN_READ, Permissions.CAN_WRITE, Permissions.CAN_DELETE }, {} }, - new String[][] { - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE }, - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u3 }, - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u2, - u3 }, { User.ADMIN_USER, u1 } }, new String[][] { {}, {}, - {}, { User.ANON_USER, Group.EVERYONE, u2, u3 } }); - checkPermissions(user3, testpaths, new Permission[][] { { Permissions.CAN_READ }, - { Permissions.CAN_READ }, { Permissions.CAN_READ }, {} }, new String[][] { - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE }, - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u3 }, - { User.ADMIN_USER, User.ANON_USER, Group.EVERYONE, u1, u2, u3 }, - { User.ADMIN_USER, u1 } }, new String[][] { {}, {}, {}, - { User.ANON_USER, Group.EVERYONE, u2, u3 } }); - - - } - - private void checkPermissions(User u, String[] testPath, Object[][] expectedPermissions, - String[][] readers, String[][] deniedReaders) throws StorageClientException { - AccessControlManagerImpl acmU = new AccessControlManagerImpl(client, u, configuration, null, new LoggingStorageListener()); - - for (int i = 0; i < testPath.length; i++) { - Permission[] p = 
acmU.getPermissions(Security.ZONE_CONTENT, testPath[i]); - LOGGER.info("Got {} {} {} ", - new Object[] { u.getId(), testPath[i], Arrays.toString(p) }); - Assert.assertArrayEquals(expectedPermissions[i], p); - String[] r = acmU.findPrincipals(Security.ZONE_CONTENT, testPath[i], - Permissions.CAN_READ.getPermission(), true); - Assert.assertArrayEquals(readers[i], sortToArray(ImmutableSet.of(r))); - r = acmU.findPrincipals(Security.ZONE_CONTENT, testPath[i], - Permissions.CAN_READ.getPermission(), false); - Assert.assertArrayEquals(deniedReaders[i], sortToArray(ImmutableSet.of(r))); - } - - } - - private String[] sortToArray(Set keySet) { - return Lists.sortedCopy(keySet).toArray(new String[keySet.size()]); - } - -} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/authorizable/AbstractAuthorizableManagerImplTest.java b/src/test/java/org/sakaiproject/nakamura/lite/authorizable/AbstractAuthorizableManagerImplTest.java deleted file mode 100644 index 29f286a9..00000000 --- a/src/test/java/org/sakaiproject/nakamura/lite/authorizable/AbstractAuthorizableManagerImplTest.java +++ /dev/null @@ -1,432 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.lite.authorizable; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.sakaiproject.nakamura.api.lite.CacheHolder; -import org.sakaiproject.nakamura.api.lite.ClientPoolException; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.authorizable.Authorizable; -import org.sakaiproject.nakamura.api.lite.authorizable.Group; -import org.sakaiproject.nakamura.api.lite.authorizable.User; -import org.sakaiproject.nakamura.lite.ConfigurationImpl; -import org.sakaiproject.nakamura.lite.LoggingStorageListener; -import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; -import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; -import org.sakaiproject.nakamura.lite.storage.ConcurrentLRUMap; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -public abstract class AbstractAuthorizableManagerImplTest { - - private static final Logger LOGGER = LoggerFactory - .getLogger(AbstractAuthorizableManagerImplTest.class); - private StorageClient client; - private ConfigurationImpl configuration; - private StorageClientPool clientPool; - private Map sharedCache = new ConcurrentLRUMap(1000); - - @Before - public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, - ClassNotFoundException { - clientPool = getClientPool(); - client = clientPool.getClient(); - configuration = new 
ConfigurationImpl(); - Map properties = Maps.newHashMap(); - properties.put("keyspace", "n"); - properties.put("acl-column-family", "ac"); - properties.put("authorizable-column-family", "au"); - configuration.activate(properties); - AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, - configuration); - authorizableActivator.setup(); - LOGGER.info("Setup Complete"); - } - - protected abstract StorageClientPool getClientPool() throws ClassNotFoundException; - - @After - public void after() throws ClientPoolException { - client.close(); - } - - @Test - public void testAuthorizableManager() throws StorageClientException, AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - Assert.assertNotNull(currentUser); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); - - Assert.assertNotNull(authorizableManager.findAuthorizable(User.ADMIN_USER)); - Assert.assertNotNull(authorizableManager.findAuthorizable(User.ANON_USER)); - Assert.assertEquals(currentUser, authorizableManager.getUser()); - } - - @Test - public void testAuthorizableManagerAccessDenied() throws StorageClientException, - AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "wrong-password"); - - Assert.assertNull(currentUser); - } - - @Test - public void testAuthorizableManagerUserNotFound() throws StorageClientException, - AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User 
currentUser = AuthenticatorImpl.authenticate("nonuser", "wrong-password"); - - Assert.assertNull(currentUser); - } - - @Test - public void testAuthorizableManagerCheckUser() throws StorageClientException, - AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); - - Authorizable a = authorizableManager.findAuthorizable(User.ADMIN_USER); - Authorizable an = authorizableManager.findAuthorizable(User.ANON_USER); - Authorizable missing = authorizableManager.findAuthorizable("missinguser"); - Assert.assertNull(missing); - Assert.assertNotNull(a); - Assert.assertNotNull(an); - Assert.assertFalse(a instanceof Group); - Assert.assertFalse(an instanceof Group); - User user = (User) a; - String[] principals = user.getPrincipals(); - Assert.assertNotNull(principals); - Assert.assertEquals(1, principals.length); - Assert.assertTrue(user.isAdmin()); - - User anon = (User) an; - principals = anon.getPrincipals(); - Assert.assertNotNull(principals); - Assert.assertEquals(0, principals.length); - Assert.assertFalse(anon.isAdmin()); - - } - - @Test - public void testAuthorizableManagerCreateUser() throws StorageClientException, - AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - AuthorizableManagerImpl 
authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); - - authorizableManager.delete("testuser"); - - Assert.assertTrue(authorizableManager.createUser("testuser", "Test User", "test", - ImmutableMap.of("testkey", (Object) "testvalue", "principals", - "administrators;testers", Authorizable.AUTHORIZABLE_TYPE_FIELD, - Authorizable.GROUP_VALUE))); - Assert.assertFalse(authorizableManager.createUser("testuser", "Test User", "test", - ImmutableMap.of("testkey", (Object) "testvalue", "principals", - "administrators;testers"))); - - Authorizable a = authorizableManager.findAuthorizable("testuser"); - Assert.assertNotNull(a); - Assert.assertFalse(a instanceof Group); - User user = (User) a; - String[] principals = user.getPrincipals(); - Assert.assertNotNull(principals); - LOGGER.info("Principals {} ", Arrays.toString(principals)); - Assert.assertEquals(3, principals.length); - Assert.assertTrue(user.isAdmin()); - - } - - @Test - public void testAuthorizableManagerCreateUserDenied() throws StorageClientException, - AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); - - authorizableManager.delete("testuser2"); - - Assert.assertTrue(authorizableManager.createUser("testuser2", "Test User", "test", - ImmutableMap.of("testkey", (Object) "testvalue", "principals", "testers", - Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.GROUP_VALUE))); - 
Assert.assertFalse(authorizableManager.createUser("testuser2", "Test User", "test", - ImmutableMap.of("testkey", (Object) "testvalue", "principals", - "administrators;testers"))); - - Authorizable a = authorizableManager.findAuthorizable("testuser2"); - Assert.assertNotNull(a); - Assert.assertFalse(a instanceof Group); - User user = (User) a; - String[] principals = user.getPrincipals(); - LOGGER.info("Principals {} ", Arrays.toString(principals)); - Assert.assertArrayEquals(new String[] { "testers", Group.EVERYONE }, principals); - - Assert.assertFalse(user.isAdmin()); - - AccessControlManagerImpl userAccessControlManagerImpl = new AccessControlManagerImpl( - client, user, configuration, sharedCache, new LoggingStorageListener()); - AuthorizableManagerImpl userAuthorizableManager = new AuthorizableManagerImpl(user, client, - configuration, userAccessControlManagerImpl, sharedCache, new LoggingStorageListener()); - - try { - userAuthorizableManager.createUser("testuser3", "Test User", "test", ImmutableMap.of( - "testkey", (Object) "testvalue", "principals", "administrators;testers", - Authorizable.AUTHORIZABLE_TYPE_FIELD, Authorizable.GROUP_VALUE)); - Assert.fail(); - } catch (AccessDeniedException e) { - LOGGER.info(" Correctly denied access {} ", e.getMessage()); - } - - try { - userAuthorizableManager.createUser("testuser4", "Test User", "test", ImmutableMap.of( - "testkey", (Object) "testvalue", "principals", "administrators;testers")); - Assert.fail(); - } catch (AccessDeniedException e) { - LOGGER.info(" Correctly denied access {} ", e.getMessage()); - } - - } - - @Test - public void testAuthorizableManagerCreateGroup() throws StorageClientException, - AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, - currentUser, configuration, 
sharedCache, new LoggingStorageListener()); - - AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); - - authorizableManager.delete("user2"); - authorizableManager.delete("user3"); - authorizableManager.delete("testgroup"); - - Assert.assertTrue(authorizableManager.createUser("user2", "TestUser2", null, ImmutableMap - .of("testkey", (Object) "testvalue", "principals", "administrators;testers"))); - Assert.assertTrue(authorizableManager.createUser("user3", "TestUser", null, ImmutableMap - .of("testkey", (Object) "testvalue", "principals", "administrators;testers"))); - Assert.assertTrue(authorizableManager.createGroup("testgroup", "Test Group", ImmutableMap - .of("testkey", (Object) "testvalue", "principals", "administrators;testers", - "members", "user1;user2"))); - Assert.assertFalse(authorizableManager.createGroup("testgroup", "Test Group", ImmutableMap - .of("testkey", (Object) "testvalue", "principals", "administrators;testers", - "members", "user1;user2", Authorizable.AUTHORIZABLE_TYPE_FIELD, - Authorizable.GROUP_VALUE))); - - Authorizable a = authorizableManager.findAuthorizable("testgroup"); - Assert.assertNotNull(a); - Assert.assertTrue(a instanceof Group); - Group g = (Group) a; - String[] principals = g.getPrincipals(); - LOGGER.info("Principals {} ", Arrays.toString(principals)); - Assert.assertArrayEquals(new String[] { "administrators", "testers", Group.EVERYONE }, principals); - String[] members = g.getMembers(); - LOGGER.info("Members {} ", Arrays.toString(members)); - Assert.assertArrayEquals(new String[] { "user1", "user2" }, members); - - g.setProperty("SomeValue", "AValue"); - g.setProperty(Authorizable.PASSWORD_FIELD, "badpassword"); - g.removeProperty("testkey"); - g.addPrincipal("tester2"); - g.removePrincipal("testers"); - // adding user 3 should make it a member of testgroup and give it the - // pricipal testgroup 
- g.addMember("user3"); - g.removeMember("user2"); - - principals = g.getPrincipals(); - List principalList = Lists.newArrayList(principals); - Collections.sort(principalList); - principals = principalList.toArray(new String[principalList.size()]); - LOGGER.info("Principals before save {} ", Arrays.toString(principals)); - Assert.assertArrayEquals(new String[] { "administrators", Group.EVERYONE, "tester2" }, principals); - members = g.getMembers(); - LOGGER.info("Members {} ", Arrays.toString(members)); - Assert.assertArrayEquals(new String[] { "user1", "user3" }, members); - - LOGGER.info("Updating Group with changed membership ----------------------"); - authorizableManager.updateAuthorizable(g); - LOGGER.info("Done Updating Group with changed membership ----------------------"); - - Authorizable a2 = authorizableManager.findAuthorizable("testgroup"); - Assert.assertNotNull(a2); - Assert.assertTrue(a2 instanceof Group); - Group g2 = (Group) a2; - principals = g2.getPrincipals(); - LOGGER.info("Principals {} ", Arrays.toString(principals)); - principalList = Lists.newArrayList(principals); - Collections.sort(principalList); - principals = principalList.toArray(new String[principalList.size()]); - Assert.assertArrayEquals(new String[] { "administrators", Group.EVERYONE, "tester2" }, principals); - members = g2.getMembers(); - LOGGER.info("Members {} ", Arrays.toString(members)); - Assert.assertArrayEquals(new String[] { "user1", "user3" }, members); - Assert.assertNull(g2.getProperty(Authorizable.PASSWORD_FIELD)); - - // Test that User3 no has testgroup as a principal. 
- Authorizable a3 = authorizableManager.findAuthorizable("user3"); - Assert.assertNotNull(a3); - Assert.assertFalse(a3 instanceof Group); - User u3 = (User) a3; - principals = u3.getPrincipals(); - LOGGER.info("Principals {} ", Arrays.toString(principals)); - Assert.assertArrayEquals(new String[] { "administrators", "testers", "testgroup", - Group.EVERYONE }, - principals); - - } - - @Test - public void testFindAuthorizable() throws StorageClientException, AccessDeniedException { - try { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl( - client, currentUser, configuration, sharedCache, new LoggingStorageListener()); - - AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); - - for (int i = 0; i < 10; i++) { - authorizableManager.delete("testfinduser" + i); - Assert.assertTrue(authorizableManager.createUser("testfinduser" + i, "TestUser", - null, ImmutableMap.of("rep:principalName", (Object) ("principal" + i), - "sakai:groupproperty", "groupprop", "sakai:userprop", "userprop"))); - authorizableManager.delete("testgroup" + i); - Assert.assertTrue(authorizableManager.createGroup("testgroup" + i, - "Test Group" + i, ImmutableMap.of("rep:principalName", - (Object) ("principal" + i), "sakai:groupproperty", "groupprop", - "sakai:grprop", "grprop"))); - } - for (int i = 0; i < 10; i++) { - Iterator userIterator = authorizableManager.findAuthorizable( - "rep:principalName", "principal" + i, User.class); - Assert.assertNotNull(userIterator); - Assert.assertTrue(userIterator.hasNext()); - Authorizable a = userIterator.next(); - Assert.assertFalse(userIterator.hasNext()); - Assert.assertTrue(a instanceof User); - User u = (User) a; - 
Assert.assertEquals("testfinduser" + i, u.getId()); - } - for (int i = 0; i < 10; i++) { - Iterator groupIterator = authorizableManager.findAuthorizable( - "rep:principalName", "principal" + i, Group.class); - Assert.assertNotNull(groupIterator); - Assert.assertTrue(groupIterator.hasNext()); - Authorizable a = groupIterator.next(); - Assert.assertFalse(groupIterator.hasNext()); - Assert.assertTrue(a instanceof Group); - Group u = (Group) a; - Assert.assertEquals("testgroup" + i, u.getId()); - } - for (int i = 0; i < 10; i++) { - Iterator groupIterator = authorizableManager.findAuthorizable( - "rep:principalName", "principal" + i, Authorizable.class); - Assert.assertNotNull(groupIterator); - Assert.assertTrue(groupIterator.hasNext()); - Authorizable a = groupIterator.next(); - if (a instanceof Group) { - Assert.assertEquals("testgroup" + i, a.getId()); - } else { - Assert.assertEquals("testfinduser" + i, a.getId()); - } - Assert.assertTrue(groupIterator.hasNext()); - a = groupIterator.next(); - if (a instanceof Group) { - Assert.assertEquals("testgroup" + i, a.getId()); - } else { - Assert.assertEquals("testfinduser" + i, a.getId()); - } - Assert.assertFalse(groupIterator.hasNext()); - } - } catch (UnsupportedOperationException e) { - LOGGER.warn("Finder methods not implemented, FIXME"); - } - - } - - @Test - public void testAuthorizableManagerNullProperties() throws StorageClientException, - AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManagerImpl = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - AuthorizableManagerImpl authorizableManager = new AuthorizableManagerImpl(currentUser, - client, configuration, accessControlManagerImpl, sharedCache, new LoggingStorageListener()); - - authorizableManager.delete("testuser"); - - 
Assert.assertTrue(authorizableManager.createUser("testuser", "Test User", "test", - null)); - Authorizable user = authorizableManager.findAuthorizable("testuser"); - Assert.assertNotNull(user); - Assert.assertTrue(user instanceof User); - - authorizableManager.delete("testgroup"); - Assert.assertTrue(authorizableManager.createGroup("testgroup", "Test Group", null)); - Authorizable group = authorizableManager.findAuthorizable("testgroup"); - Assert.assertNotNull(group); - Assert.assertTrue(group instanceof Group); - } - -} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerTest.java b/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerTest.java deleted file mode 100644 index 0e443641..00000000 --- a/src/test/java/org/sakaiproject/nakamura/lite/content/AbstractContentManagerTest.java +++ /dev/null @@ -1,394 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.lite.content; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.sakaiproject.nakamura.api.lite.CacheHolder; -import org.sakaiproject.nakamura.api.lite.ClientPoolException; -import org.sakaiproject.nakamura.api.lite.StorageClientException; -import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException; -import org.sakaiproject.nakamura.api.lite.authorizable.User; -import org.sakaiproject.nakamura.api.lite.content.Content; -import org.sakaiproject.nakamura.lite.ConfigurationImpl; -import org.sakaiproject.nakamura.lite.LoggingStorageListener; -import org.sakaiproject.nakamura.lite.accesscontrol.AccessControlManagerImpl; -import org.sakaiproject.nakamura.lite.accesscontrol.AuthenticatorImpl; -import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator; -import org.sakaiproject.nakamura.lite.storage.ConcurrentLRUMap; -import org.sakaiproject.nakamura.lite.storage.StorageClient; -import org.sakaiproject.nakamura.lite.storage.StorageClientPool; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Random; - -public abstract class AbstractContentManagerTest { - - private static final Logger LOGGER = LoggerFactory.getLogger(AbstractContentManagerTest.class); - private StorageClient client; - private ConfigurationImpl configuration; - private StorageClientPool clientPool; - private Map sharedCache = new ConcurrentLRUMap(1000); - - @Before - public void before() throws StorageClientException, AccessDeniedException, ClientPoolException, - ClassNotFoundException { - clientPool = getClientPool(); - client = clientPool.getClient(); - 
configuration = new ConfigurationImpl(); - Map properties = Maps.newHashMap(); - properties.put("keyspace", "n"); - properties.put("acl-column-family", "ac"); - properties.put("authorizable-column-family", "au"); - properties.put("content-column-family", "cn"); - configuration.activate(properties); - AuthorizableActivator authorizableActivator = new AuthorizableActivator(client, - configuration); - authorizableActivator.setup(); - LOGGER.info("Setup Complete"); - } - - protected abstract StorageClientPool getClientPool() throws ClassNotFoundException; - - @After - public void after() throws ClientPoolException { - client.close(); - } - - @Test - public void testCreateContent() throws StorageClientException, AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, - currentUser, configuration, null, new LoggingStorageListener()); - - ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, - configuration, null, new LoggingStorageListener()); - contentManager.update(new Content("/", ImmutableMap.of("prop1", (Object) "value1"))); - contentManager.update(new Content("/test", ImmutableMap.of("prop1", (Object) "value2"))); - contentManager - .update(new Content("/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); - - Content content = contentManager.get("/"); - Assert.assertEquals("/", content.getPath()); - Map p = content.getProperties(); - LOGGER.info("Properties is {}",p); - Assert.assertEquals("value1", (String)p.get("prop1")); - Iterator children = content.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - Content child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value2", 
(String)p.get("prop1")); - children = child.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test/ing", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value3", (String)p.get("prop1")); - - } - - @Test - public void testDeleteContent() throws StorageClientException, AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, - configuration, sharedCache, new LoggingStorageListener()); - contentManager.update(new Content("/", ImmutableMap.of("prop1", (Object) "value1"))); - contentManager.update(new Content("/test", ImmutableMap.of("prop1", (Object) "value2"))); - contentManager - .update(new Content("/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); - - Content content = contentManager.get("/"); - Assert.assertEquals("/", content.getPath()); - Map p = content.getProperties(); - Assert.assertEquals("value1", (String)p.get("prop1")); - Iterator children = content.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - Content child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value2", (String)p.get("prop1")); - children = child.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test/ing", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value3", (String)p.get("prop1")); - - 
contentManager.delete("/test/ing"); - content = contentManager.get("/test/ing"); - Assert.assertNull(content); - - } - - @Test - public void testUpdateContent() throws StorageClientException, AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, - configuration, sharedCache, new LoggingStorageListener()); - contentManager.update(new Content("/", ImmutableMap.of("prop1", (Object) "value1"))); - contentManager.update(new Content("/test", ImmutableMap.of("prop1", (Object) "value2"))); - contentManager - .update(new Content("/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); - - Content content = contentManager.get("/"); - Assert.assertEquals("/", content.getPath()); - Map p = content.getProperties(); - Assert.assertEquals("value1", (String)p.get("prop1")); - Iterator children = content.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - Content child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value2", (String)p.get("prop1")); - children = child.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test/ing", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value3", (String)p.get("prop1")); - - p = content.getProperties(); - Assert.assertNull((String)p.get("prop1update")); - - content.setProperty("prop1update", "value4"); - contentManager.update(content); - - content = contentManager.get(content.getPath()); - p = 
content.getProperties(); - Assert.assertEquals("value4", (String)p.get("prop1update")); - - } - - @Test - public void testVersionContent() throws StorageClientException, AccessDeniedException, - InterruptedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, - configuration, sharedCache, new LoggingStorageListener()); - contentManager.update(new Content("/", ImmutableMap.of("prop1", (Object) "value1"))); - contentManager.update(new Content("/test", ImmutableMap.of("prop1", (Object) "value2"))); - contentManager - .update(new Content("/test/ing", ImmutableMap.of("prop1", (Object) "value3"))); - - Content content = contentManager.get("/"); - Assert.assertEquals("/", content.getPath()); - Map p = content.getProperties(); - Assert.assertEquals("value1", (String)p.get("prop1")); - Iterator children = content.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - Content child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value2", (String)p.get("prop1")); - children = child.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test/ing", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value3", (String)p.get("prop1")); - - p = content.getProperties(); - Assert.assertNull((String)p.get("prop1update")); - - // FIXME: add some version list methods, we have no way of testing if - // this works. 
- String versionName = contentManager.saveVersion("/"); - - // must reload after a version save. - content = contentManager.get("/"); - - content.setProperty("prop1update", "value4"); - contentManager.update(content); - - content = contentManager.get("/"); - p = content.getProperties(); - Assert.assertEquals("value4", (String)p.get("prop1update")); - - // just in case the machine is so fast all of that took 1ms - Thread.sleep(50); - - String versionName2 = contentManager.saveVersion("/"); - - Content versionContent = contentManager.getVersion("/", versionName); - Assert.assertNotNull(versionContent); - Content versionContent2 = contentManager.getVersion("/", versionName2); - Assert.assertNotNull(versionContent2); - List versionList = contentManager.getVersionHistory("/"); - Assert.assertNotNull(versionList); - Assert.assertArrayEquals("Version List is " + Arrays.toString(versionList.toArray()) - + " expecting " + versionName2 + " then " + versionName, new String[] { - versionName2, versionName }, versionList.toArray(new String[versionList.size()])); - - Content badVersionContent = contentManager.getVersion("/", "BadVersion"); - Assert.assertNull(badVersionContent); - - } - - @Test - public void testUploadContent() throws StorageClientException, AccessDeniedException { - AuthenticatorImpl AuthenticatorImpl = new AuthenticatorImpl(client, configuration); - User currentUser = AuthenticatorImpl.authenticate("admin", "admin"); - - AccessControlManagerImpl accessControlManager = new AccessControlManagerImpl(client, - currentUser, configuration, sharedCache, new LoggingStorageListener()); - - ContentManagerImpl contentManager = new ContentManagerImpl(client, accessControlManager, - configuration, sharedCache, new LoggingStorageListener()); - contentManager.update(new Content("/", ImmutableMap.of("prop1", (Object) "value1"))); - contentManager.update(new Content("/test", ImmutableMap.of("prop1", (Object) "value2"))); - contentManager - .update(new Content("/test/ing", 
ImmutableMap.of("prop1", (Object) "value3"))); - - Content content = contentManager.get("/"); - Assert.assertEquals("/", content.getPath()); - Map p = content.getProperties(); - Assert.assertEquals("value1", (String)p.get("prop1")); - Iterator children = content.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - Content child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value2", (String)p.get("prop1")); - children = child.listChildren().iterator(); - Assert.assertTrue(children.hasNext()); - child = children.next(); - Assert.assertFalse(children.hasNext()); - Assert.assertEquals("/test/ing", child.getPath()); - p = child.getProperties(); - Assert.assertEquals("value3", (String)p.get("prop1")); - - p = content.getProperties(); - Assert.assertNull((String)p.get("prop1update")); - - // FIXME: add some version list methods, we have no way of testing if - // this works. 
- contentManager.saveVersion("/"); - - content = contentManager.get("/"); - - content.setProperty("prop1update", "value4"); - contentManager.update(content); - - content = contentManager.get(content.getPath()); - p = content.getProperties(); - Assert.assertEquals("value4", (String)p.get("prop1update")); - - final byte[] b = new byte[20 * 1024 * 1024 + 1231]; - Random r = new Random(); - r.nextBytes(b); - try { - contentManager.update(new Content("/test/ing/testfile.txt", ImmutableMap.of( - "testproperty", (Object) "testvalue"))); - long su = System.currentTimeMillis(); - ByteArrayInputStream bais = new ByteArrayInputStream(b); - contentManager.writeBody("/test/ing/testfile.txt", bais); - bais.close(); - long eu = System.currentTimeMillis(); - - InputStream read = contentManager.getInputStream("/test/ing/testfile.txt"); - - int i = 0; - byte[] buffer = new byte[8192]; - int j = read.read(buffer); - Assert.assertNotSame(-1, j); - while (j != -1) { - // Assert.assertEquals((int)b[i] & 0xff, j); - i = i + j; - j = read.read(buffer); - } - read.close(); - Assert.assertEquals(b.length, i); - long ee = System.currentTimeMillis(); - LOGGER.info("Write rate {} MB/s Read Rate {} MB/s ", - (1000 * (double) b.length / (1024 * 1024 * (double) (eu - su))), - (1000 * (double) b.length / (1024 * 1024 * (double) (ee - eu)))); - - // Update content and re-read - r.nextBytes(b); - bais = new ByteArrayInputStream(b); - contentManager.writeBody("/test/ing/testfile.txt", bais); - - read = contentManager.getInputStream("/test/ing/testfile.txt"); - - i = 0; - j = read.read(buffer); - Assert.assertNotSame(-1, j); - while (j != -1) { - for (int k = 0; k < j; k++) { - Assert.assertEquals(b[i], buffer[k]); - i++; - } - if ((i % 100 == 0) && (i < b.length - 20)) { - Assert.assertEquals(10, read.skip(10)); - i += 10; - } - j = read.read(buffer); - } - read.close(); - Assert.assertEquals(b.length, i); - - } catch (IOException e) { - - // TODO Auto-generated catch block - e.printStackTrace(); - 
Assert.fail(); - } - - } - -} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/DerbySetup.java b/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/DerbySetup.java deleted file mode 100644 index 9fd6d23d..00000000 --- a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/derby/DerbySetup.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.lite.jdbc.derby; - -import com.google.common.collect.ImmutableMap; - -import org.sakaiproject.nakamura.lite.storage.jdbc.JDBCStorageClientPool; - -public class DerbySetup { - - private static JDBCStorageClientPool clientPool = createClientPool(); - - private synchronized static JDBCStorageClientPool createClientPool() { - try { - JDBCStorageClientPool connectionPool = new JDBCStorageClientPool(); - connectionPool.activate(ImmutableMap.of(JDBCStorageClientPool.CONNECTION_URL, - (Object) "jdbc:derby:memory:MyDB;create=true", - JDBCStorageClientPool.JDBC_DRIVER, "org.apache.derby.jdbc.EmbeddedDriver")); - return connectionPool; - } catch (ClassNotFoundException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - public static JDBCStorageClientPool getClientPool() { - return clientPool; - } - -} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/MysqlSetup.java b/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/MysqlSetup.java deleted file mode 100644 index 7df42483..00000000 --- a/src/test/java/org/sakaiproject/nakamura/lite/jdbc/mysql/MysqlSetup.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Sakai Foundation (SF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The SF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package org.sakaiproject.nakamura.lite.jdbc.mysql; - -import com.google.common.collect.ImmutableMap; - -import org.sakaiproject.nakamura.lite.storage.jdbc.JDBCStorageClientPool; - -public class MysqlSetup { - - private static JDBCStorageClientPool clientPool = createClientPool(); - - public synchronized static JDBCStorageClientPool createClientPool() { - try { - JDBCStorageClientPool connectionPool = new JDBCStorageClientPool(); - connectionPool - .activate(ImmutableMap - .of(JDBCStorageClientPool.CONNECTION_URL, - (Object) "jdbc:mysql://127.0.0.1:3306/sakai22?useUnicode=true&characterEncoding=UTF-8", - JDBCStorageClientPool.JDBC_DRIVER, "com.mysql.jdbc.Driver", - "username", "sakai22", "password", "sakai22")); - return connectionPool; - } catch (ClassNotFoundException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - public static JDBCStorageClientPool getClientPool() { - return clientPool; - } - -} diff --git a/src/test/java/org/sakaiproject/nakamura/lite/types/TestTypes.java b/src/test/java/org/sakaiproject/nakamura/lite/types/TestTypes.java deleted file mode 100644 index 4c81aeb9..00000000 --- a/src/test/java/org/sakaiproject/nakamura/lite/types/TestTypes.java +++ /dev/null @@ -1,193 +0,0 @@ -package org.sakaiproject.nakamura.lite.types; - -import com.google.common.collect.Maps; - -import junit.framework.Assert; - -import org.junit.Test; - -import java.io.IOException; -import java.io.InputStream; -import java.math.BigDecimal; -import java.util.Calendar; -import java.util.Map; -import java.util.TimeZone; - -public class TestTypes { - - - @Test - public void testTypes() { - Map> typeById = Types.getTypeByIdMap(); - Assert.assertNotNull(typeById); - @SuppressWarnings("unused") - Map, Type> typeByClass = Types.getTypeMap(); - Assert.assertNotNull(typeById); - } - - @Test - public void testWriteTypes() throws IOException { - Map map = Maps.newHashMap(); - map.put("A", 1); - map.put("B", Long.MAX_VALUE); - map.put("C", "String"); - map.put("D", 
new BigDecimal("12345.12E23")); - Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("BST")); - cal.setTimeInMillis(System.currentTimeMillis()); - map.put("E", cal); - map.put("F", (double)0.1); - map.put("G", true); - map.put("H", false); - map.put("J", null); - - InputStream in = Types.storeMapToStream("testkey", map); - Map output = Maps.newHashMap(); - Types.loadFromStream("testkey", output, in); - - Integer a = (Integer) map.get("A"); - Assert.assertNotNull(a); - Assert.assertEquals(1, a.intValue()); - Long b = (Long) map.get("B"); - Assert.assertNotNull(b); - Assert.assertEquals(Long.MAX_VALUE, b.longValue()); - String c = (String) map.get("C"); - Assert.assertNotNull(c); - Assert.assertEquals("String", c); - BigDecimal d = (BigDecimal) map.get("D"); - Assert.assertNotNull(d); - Assert.assertEquals(new BigDecimal("12345.12E23"), d); - Calendar e = (Calendar) map.get("E"); - Assert.assertNotNull(e); - Assert.assertEquals(cal, e); - Assert.assertEquals(cal.getTimeInMillis(), e.getTimeInMillis()); - Assert.assertEquals(cal.getTimeZone(), e.getTimeZone()); - Double f = (Double) map.get("F"); - Assert.assertNotNull(f); - Assert.assertEquals(0.1, f); - Boolean g = (Boolean) map.get("G"); - Assert.assertNotNull(g); - Assert.assertTrue(g.booleanValue()); - Boolean h = (Boolean) map.get("H"); - Assert.assertNotNull(h); - Assert.assertFalse(h.booleanValue()); - Object j = map.get("J"); - Assert.assertNull(j); - - - - } - - @Test - public void testWriteArrayTypes() throws IOException { - Map map = Maps.newHashMap(); - map.put("A", new int[]{1,2}); - map.put("B", new long[]{Long.MIN_VALUE,Long.MAX_VALUE}); - map.put("C", new String[]{"StringA","StringB"}); - map.put("D", new BigDecimal[]{new BigDecimal("12345.12E23"),new BigDecimal("12345.12E21")}); - Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("BST")); - cal.setTimeInMillis(System.currentTimeMillis()); - Calendar cal2 = Calendar.getInstance(TimeZone.getTimeZone("PST")); - 
cal2.setTimeInMillis(System.currentTimeMillis()); - map.put("E", new Calendar[]{cal,cal2}); - map.put("F", new double[]{0.1,0.2}); - map.put("G", new boolean[]{true,false}); - map.put("H", new boolean[]{false,true}); - - InputStream in = Types.storeMapToStream("testkey", map); - Map output = Maps.newHashMap(); - Types.loadFromStream("testkey", output, in); - - int[] a = (int[]) map.get("A"); - Assert.assertNotNull(a); - Assert.assertEquals(2, a.length); - Assert.assertEquals(1, a[0]); - Assert.assertEquals(2, a[1]); - long[] b = (long[]) map.get("B"); - Assert.assertNotNull(b); - Assert.assertEquals(2, b.length); - Assert.assertEquals(Long.MIN_VALUE, b[0]); - Assert.assertEquals(Long.MAX_VALUE, b[1]); - String[] c = (String[]) map.get("C"); - Assert.assertNotNull(c); - Assert.assertEquals(2, c.length); - Assert.assertEquals("StringA", c[0]); - Assert.assertEquals("StringB", c[1]); - BigDecimal[] d = (BigDecimal[]) map.get("D"); - Assert.assertNotNull(d); - Assert.assertEquals(2, d.length); - Assert.assertEquals(new BigDecimal("12345.12E23"), d[0]); - Assert.assertEquals(new BigDecimal("12345.12E21"), d[1]); - Calendar[] e = (Calendar[]) map.get("E"); - Assert.assertNotNull(e); - Assert.assertEquals(2, e.length); - Assert.assertEquals(cal, e[0]); - Assert.assertEquals(cal.getTimeInMillis(), e[0].getTimeInMillis()); - Assert.assertEquals(cal.getTimeZone(), e[0].getTimeZone()); - Assert.assertEquals(cal2, e[1]); - Assert.assertEquals(cal2.getTimeInMillis(), e[1].getTimeInMillis()); - Assert.assertEquals(cal2.getTimeZone(), e[1].getTimeZone()); - double[] f = (double[]) map.get("F"); - Assert.assertNotNull(f); - Assert.assertEquals(2, f.length); - Assert.assertEquals(0.1, f[0]); - Assert.assertEquals(0.2, f[1]); - boolean[] g = (boolean[]) map.get("G"); - Assert.assertNotNull(g); - Assert.assertEquals(2, g.length); - Assert.assertTrue(g[0]); - Assert.assertFalse(g[1]); - boolean[] h = (boolean[]) map.get("H"); - Assert.assertNotNull(h); - Assert.assertEquals(2, 
h.length); - Assert.assertFalse(h[0]); - Assert.assertTrue(h[1]); - - - - } - @Test - public void testWriteEmptyArrayTypes() throws IOException { - Map map = Maps.newHashMap(); - map.put("A", new int[]{}); - map.put("B", new long[]{}); - map.put("C", new String[]{}); - map.put("D", new BigDecimal[]{}); - map.put("E", new Calendar[]{}); - map.put("F", new double[]{}); - map.put("G", new boolean[]{}); - map.put("H", new boolean[]{}); - - InputStream in = Types.storeMapToStream("testkey", map); - Map output = Maps.newHashMap(); - Types.loadFromStream("testkey", output, in); - - int[] a = (int[]) map.get("A"); - Assert.assertNotNull(a); - Assert.assertEquals(0, a.length); - long[] b = (long[]) map.get("B"); - Assert.assertNotNull(b); - Assert.assertEquals(0, b.length); - String[] c = (String[]) map.get("C"); - Assert.assertNotNull(c); - Assert.assertEquals(0, c.length); - BigDecimal[] d = (BigDecimal[]) map.get("D"); - Assert.assertNotNull(d); - Assert.assertEquals(0, d.length); - Calendar[] e = (Calendar[]) map.get("E"); - Assert.assertNotNull(e); - Assert.assertEquals(0, e.length); - double[] f = (double[]) map.get("F"); - Assert.assertNotNull(f); - Assert.assertEquals(0, f.length); - boolean[] g = (boolean[]) map.get("G"); - Assert.assertNotNull(g); - Assert.assertEquals(0, g.length); - boolean[] h = (boolean[]) map.get("H"); - Assert.assertNotNull(h); - Assert.assertEquals(0, h.length); - - - - } - -}