==Dazzle Example adaptor for 1.6 spec source compliant with the registry==

Subversion development repository: http://www.derkholm.net/svn/repos/dazzle/branches/16Dazzle

See the class org.biojava.servlets.dazzle.datasource.SimpleFile16ExampleSource for a datasource that conforms to the new 1.6 spec. The listing below is a further example, DazzleTest16Source, a JDBC-backed datasource that implements the 1.6 feature and type-metadata interfaces:

<pre>
/*
 * BioJava development code
 *
 * This code may be freely distributed and modified under the
 * terms of the GNU Lesser General Public Licence. This should
 * be distributed with the code. If you do not have a copy,
 * see:
 *
 *     http://www.gnu.org/copyleft/lesser.html
 *
 * Copyright for this code is held jointly by the individual
 * authors. These should be listed in @author doc comments.
 *
 * For more information on the BioJava project and its aims,
 * or to join the biojava-l mailing list, visit the home page
 * at:
 *
 *     http://www.biojava.org/
 *
 * Created on 10.09.2009
 * @author Jonathan Warren
 */

package uk.ac.sanger.dazzle.datasource;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.naming.Context;
import javax.naming.InitialContext;
import javax.servlet.ServletContext;
import javax.sql.DataSource;

import org.biojava.servlets.dazzle.Segment;
import org.biojava.servlets.dazzle.datasource.Abstract16FeatureSource;
import org.biojava.servlets.dazzle.datasource.DASGFFGroup;
import org.biojava.servlets.dazzle.datasource.DataSourceException;
import org.biojava.servlets.dazzle.datasource.GFF16Feature;
import org.biojava.servlets.dazzle.datasource.GFFFeature;
import org.biojava.servlets.dazzle.datasource.Type16MetadataSource;

public class DazzleTest16Source extends Abstract16FeatureSource implements Type16MetadataSource {

    DataSource dataSource;
    String dataSourceName;

    public String getDataSourceName() {
        return dataSourceName;
    }

    public void setDataSourceName(String dataSourceName) {
        this.dataSourceName = dataSourceName;
    }

    public void init(ServletContext ctx) throws DataSourceException {
        System.out.println("init DazzleTest16Source " + dataSourceName);
        super.init(ctx);
        try {
            // dataSourceName is a JNDI name that the servlet container must bind
            // under java:comp/env (for example via a resource-ref in web.xml)
            Context env = (Context) new InitialContext().lookup("java:comp/env");
            dataSource = (DataSource) env.lookup(dataSourceName);
        } catch (Exception ex) {
            throw new DataSourceException(ex);
        }
    }

    public GFF16Feature[] getFeatures(Segment seg, String[] types)
            throws DataSourceException {
        // All features in this source share the same type metadata, so filtering on
        // types is rarely needed, but it is supported below for ;type= requests.
        List<GFF16Feature> gffFeatures = new ArrayList<GFF16Feature>();

        // get the requested region from the segment object
        String ref = seg.getReference();
        String start = Integer.toString(seg.getStart());
        String stop = Integer.toString(seg.getStop());

        // This source is attached to the generic grouped_db database schema used by the
        // ProServer source adaptor grouped_db.pm. For the proserver_vertres_mouse_snps_WSB_EiJ
        // database a single feature table would have been enough (fgroup only repeats the id
        // and everything else is null), but as a generic source the query joins feature and
        // fgroup anyway. Another example with real groups would be useful, especially for
        // 1.6 testing.
        String sql = "SELECT * FROM feature, fgroup " +
                " WHERE segment = ? " +
                " AND start <= ? AND end >= ? " +
                " AND feature.group_id = fgroup.group_id " +
                " ORDER BY start";

        try {
            Connection conn = dataSource.getConnection();
            PreparedStatement ps = conn.prepareStatement(sql);
            ps.setString(1, ref);
            ps.setString(2, stop);
            ps.setString(3, start);
            ResultSet results = ps.executeQuery();

            while (results.next()) {
                GFF16Feature feature = new GFF16Feature();
                feature.setName(results.getString("id"));
                feature.setStart(results.getString("start"));
                feature.setEnd(results.getString("end"));
                feature.setLabel(results.getString("id"));
                feature.setMethod("samtools"); // could come from the database, but all rows are the same in this source
                feature.setOrientation(results.getString("orient"));
                // The type should be an ontology term; for a SNP the SO id is SO:0000694,
                // and ECO:0000053 is "inferred from reviewed computational analysis".
                feature.setType(results.getString("type_id"));
                feature.setTypeCategory(results.getString("type_cat")); // e.g. ECO:0000053, normally from the database
                feature.setTypeId(results.getString("type_id"));        // e.g. SO:0000694, normally from the database

                DASGFFGroup group = new DASGFFGroup(results.getString("group_id"), "Coding SNP");
                feature.setGroup(group);

                // filter by type if the request URL ends with e.g. ;type=exon
                if (types != null) {
                    for (String type : types) {
                        if (type.equals(results.getString("type_id"))) {
                            gffFeatures.add(feature);
                        }
                    }
                } else {
                    gffFeatures.add(feature);
                }
            }

            ps.close();
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }

        return gffFeatures.toArray(new GFF16Feature[gffFeatures.size()]);
    }
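
    // Illustration only (host, port and source name are placeholders, not taken from the
    // original code): a DAS features request that exercises the type filter above could
    // look like
    //   http://localhost:8080/das/test16/features?segment=1:10000,20000;type=SO:0000694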

    public String getFeatureID(GFFFeature f) {
        // override this method: our feature ids are unique and the same as the name anyway
        return f.getName();
    }

    public Set<String> getAllTypes() {
        // Note: type_id and category are the wrong way around in this database; "variation"
        // would be a better category, with non_synonymous/synonymous as the type_ids, since
        // the id needs to be unique and belong to a single category.
        Set<String> list = new HashSet<String>();
        String sql = "SELECT DISTINCT type_id FROM feature";
        try {
            Connection conn = dataSource.getConnection();
            PreparedStatement ps = conn.prepareStatement(sql);
            ResultSet results = ps.executeQuery();
            while (results.next()) {
                list.add(results.getString("type_id"));
            }
            ps.close();
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return Collections.unmodifiableSet(list);
    }

    public String getCategory(String typeId) {
        String typeCategory = null;
        String sql = "SELECT type_cat FROM feature WHERE type_id = ?";
        try {
            Connection conn = dataSource.getConnection();
            PreparedStatement ps = conn.prepareStatement(sql);
            ps.setString(1, typeId);
            ResultSet results = ps.executeQuery();
            while (results.next()) {
                typeCategory = results.getString("type_cat");
            }
            ps.close();
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return typeCategory;
    }

    public String getTypeCvId(String type) {
        // stub: a real source would return the controlled-vocabulary id for this type,
        // for example an SO accession such as SO:0000694
        return "cvId here";
    }

    public int getTypeCount(String t) {
        int typeCount = 0;
        String sql = "SELECT COUNT(*) FROM feature WHERE type_id = ?";
        try {
            Connection conn = dataSource.getConnection();
            PreparedStatement ps = conn.prepareStatement(sql);
            ps.setString(1, t);
            ResultSet results = ps.executeQuery();
            while (results.next()) {
                typeCount = results.getInt(1);
            }
            ps.close();
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return typeCount;
    }
}
</pre>
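For orientation, the grouped_db tables touched by the queries above look roughly as follows. This is a minimal sketch inferred only from the column names used in the source; the column types and sizes, and the exact ProServer grouped_db definitions, are assumptions.

<pre>
-- hypothetical DDL for the columns referenced by DazzleTest16Source
CREATE TABLE fgroup (
    group_id  VARCHAR(40) PRIMARY KEY   -- only the id is populated in this particular database
);

CREATE TABLE feature (
    id        VARCHAR(40),              -- feature id, also used as the DAS label
    segment   VARCHAR(40),              -- reference sequence / chromosome name
    start     INT,
    end       INT,
    orient    CHAR(1),                  -- strand, e.g. + or -
    type_id   VARCHAR(40),              -- e.g. an SO term such as SO:0000694
    type_cat  VARCHAR(40),              -- e.g. an ECO term such as ECO:0000053
    group_id  VARCHAR(40)               -- joins to fgroup.group_id
);
</pre>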
This is a working document and a proposal for a reworked DAS specification, which aims to:
Note: the DAS specification is a technical document, but with some effort it should be readable and understandable by people without a deep understanding of broader technical issues or other system architectures; that is, it makes only basic assumptions about the reader.