论坛首页 Java企业应用论坛

基于red5 dsj 的java 处理视频流和一些疑问

浏览 3790 次
精华帖 (0) :: 良好帖 (0) :: 新手帖 (0) :: 隐藏帖 (0)
作者 正文
   发表时间:2013-06-19  
最近夭折了一个项目,由于我预研失败,同时我将被惩罚,被外派去其他公司干活,干完再回来。
下面来说说这个项目。最后再总结为什么失败。
基于red5的视频聊天,桌面共享。下图是简单的逻辑图

 
做视频聊天,做桌面共享,而且要在手机端也能显示出来,因此采用red5 这个开源的流媒体服务器。
 
公司的产品是典型的基于服务的多客户端模式。
简单的说是一个java后台发布服务,客户端有windows桌面版(java);mac桌面(java);android;ios,由于是自动化办公,所以集成了im,office,网盘等等功能。
 
下面看看怎么玩的。
1  dsj 这是微软 DirectShow 的 Java 封装(由 humatic 公司开发),免费使用,非开源
下载下来以后里面有个dsj.dll,放入C盘 system32 目录里面。
【简单调用本地摄像头】
package com.oatos.red5;
/**
dsj demo code.
You may use, modify and redistribute this code under the terms laid out in the header of the DSJDemo application.
copyright 2009
N.Peters
humatic GmbH
Berlin, Germany
**/

import de.humatic.dsj.*;

import javax.swing.*;

/**
Updated for 0_8_6

Demonstrates changing formats delivered by a capture device before and after the filtergraph is built.
Configuration before building the graph is done via a ready made dialog, whose source-code can be found in the
main dsj demo.

dsj 0_8_51 adds some missing functionality to address single output pins on capture devices with separate capture
and preview pins. This makes things a little more complex, but finally also takes hardware reality into account
after the graph is built.
0_8_6 intoduces the setCropSize method, which enables fine grain control of output sizes with devices, that
otherwise only offer limited choice of dimensions with their supported formats. This mostly applies to the more
professional capture boards and cameras. For example Viewcast Osprey boards or IDS Ueye cameras will only have one
dimension to select from, but then can shrink and / or crop from that. There is no clear standard, so the
implementation is driver specific.
Also new in 0_8_6 are api methods to configure analog video digitizers.
**/

/**
 * dsj demo: enumerates the formats a video capture device offers on its
 * preview/capture pin, and lets the user change format, frame rate, crop size,
 * image flipping and analog TV standard at runtime. Also grabs single frames
 * (optionally continuously) into a secondary window.
 *
 * Registers itself as a {@link java.beans.PropertyChangeListener} on the
 * filtergraph so it can react to FORMAT_CHANGED events and resync the UI.
 */
public class MyCaptureFormats implements java.beans.PropertyChangeListener {

	/** Live capture filtergraph (video only in this demo). */
	private DSCapture graph;

	/** The active video capture device inside the graph. */
	private DSCapture.CaptureDevice vDev;

	/** activeOut is previewOut when the device has a preview pin, else captureOut. */
	private DSFilter.DSPin activeOut,
						   previewOut,
						   captureOut;

	/** Formats reported by the pin we configure; indexes match formatSelector. */
	private DSMediaType [] mf;

	/** Main window and the window showing a grabbed frame. */
	private JFrame f,
				   imageFrame;

	private JComboBox formatSelector;

	private JSpinner fpsSpinner;

	int FORMAT_INDEX = 1;

	/**
	 * changingFormat guards against re-entrant UI events while a format change
	 * is in flight; it is cleared again in propertyChange() on FORMAT_CHANGED.
	 */
	boolean changingFormat,
			flip;

	/**
	 * Display names for analog TV standards; the index corresponds to the bit
	 * position in the device's VDig settings (empty entries are unused bits).
	 */
	private String[] tvFormatStr = new String[]{"ATV_None",
												"ATV_NTSC_M",
												"ATV_NTSC_M_J",
												"ATV_NTSC_433",
												"",
												"ATV_PAL_B",
												"ATV_PAL_D",
												"",
												"ATV_PAL_H",
												"ATV_PAL_I",
												"ATV_PAL_M",
												"ATV_PAL_N",
												"ATV_PAL_60",
												"ATV_SECAM_B",
												"ATV_SECAM_D",
												"ATV_SECAM_G",
												"ATV_SECAM_H",
												"ATV_SECAM_K ",
												"ATV_SECAM_K1",
												"ATV_SECAM_L",
												"ATV_SECAM_L1",
												"ATV_PAL_N_COMBO"};

	public MyCaptureFormats() {}

	/**
	 * Builds the capture graph on the first video device found, wires up all
	 * controls and shows the main window.
	 */
	public void createGraph() {

		f = new JFrame("dsj - CaptureFormats, 0_8_6");

		//graph = DSCapture.fromUserDialog(f, DSFiltergraph.DD7, this);

		DSFilterInfo[][] dsi = DSCapture.queryDevices();

		/** this sample only uses video **/

		graph = new DSCapture(DSFiltergraph.DD7, dsi[0][0], false, DSFilterInfo.doNotRender(), this);

		f.add(java.awt.BorderLayout.CENTER, graph.asComponent());

		vDev = graph.getActiveVideoDevice();

		previewOut = vDev.getDeviceOutput(DSCapture.CaptureDevice.PIN_CATEGORY_PREVIEW);

		captureOut = vDev.getDeviceOutput(DSCapture.CaptureDevice.PIN_CATEGORY_CAPTURE);

		/**
		We're only interested in the preview output for this demo, but a lot of devices (webcams amongst others)
		do not have a separate preview pin (preview is then built via a Tee filter)
		**/

		activeOut = previewOut != null ? previewOut : captureOut;

		int pinIndex = activeOut.getIndex();

		DSFilterInfo.DSPinInfo usedPinInfo = vDev.getFilterInfo().getDownstreamPins()[pinIndex];

		formatSelector = new JComboBox();

		// Heavyweight popup so the menu is not hidden behind the video canvas.
		formatSelector.setLightWeightPopupEnabled(false);

		mf = usedPinInfo.getFormats();

		for (int i = 0; i < mf.length; i++) {

			formatSelector.addItem(mf[i].getDisplayString());

		}

		int currentFormat = vDev.getSelectedFormat(activeOut);

		// Some drivers report a format index outside the enumerated list;
		// in that case just leave the default selection.
		try{ formatSelector.setSelectedIndex(currentFormat); } catch (Exception ignored){ }

		formatSelector.addActionListener(new java.awt.event.ActionListener() {

			public void actionPerformed(java.awt.event.ActionEvent e) {

				if (changingFormat) return;

				try{

					changingFormat = true;

					vDev.setOutputFormat(activeOut, formatSelector.getSelectedIndex());

				} catch (Exception ex){ex.printStackTrace();}

			}

		});

		try{

			DSFilterInfo.DSMediaFormat dsmf = (DSFilterInfo.DSMediaFormat)(mf[currentFormat]);

			// Spinner range is the driver-reported fps range widened by 5 on
			// each side; step is 5 fps.
			fpsSpinner = new JSpinner(new SpinnerNumberModel(
											  (int)(previewOut != null ? vDev.getFrameRate(previewOut) : vDev.getFrameRate(captureOut)),
											  (int)(dsmf.getFrameRateRange()[0])-5,
											  (int)(dsmf.getFrameRateRange()[1])+5,
											  5));
		} catch (Exception e){

			// Fallback when the driver does not report a frame-rate range.
			fpsSpinner = new JSpinner(new SpinnerNumberModel(15, 0, 30, 5));

		}

		fpsSpinner.addChangeListener(new javax.swing.event.ChangeListener() {

			public void stateChanged(javax.swing.event.ChangeEvent ce){

				if (changingFormat) return;

				try{

					vDev.setFrameRate(activeOut, Float.parseFloat(fpsSpinner.getValue().toString()));

					System.out.println((previewOut != null ? "preview" : "capture")+" fps: "+vDev.getFrameRate(activeOut));

				} catch (Exception e){ System.out.println(e.toString()); }

			}
		});

		/**
		You can take the short way home and change things with a user dialog. See the SwingPropPage sample for a
		way to make this more java-like.
		**/

		final JButton fd = new JButton("WDM dialog");

		fd.addActionListener(new java.awt.event.ActionListener() {

			public void actionPerformed(java.awt.event.ActionEvent e) {

				changingFormat = true;

				// Try the preview dialog first, then capture, then the plain
				// device dialog; only reset the guard if none could be shown.
				int result = graph.getActiveVideoDevice().showDialog(DSCapture.CaptureDevice.WDM_PREVIEW);

				if (result < 0) {

					System.out.println("cant show preview dialog: "+result);

					result = graph.getActiveVideoDevice().showDialog(DSCapture.CaptureDevice.WDM_CAPTURE);

				}

				if (result < 0) {

					System.out.println("no capture dialog either: "+result);

					result = graph.getActiveVideoDevice().showDialog(DSCapture.CaptureDevice.WDM_DEVICE);

				}

				if (result < 0) changingFormat = false;

			}

		});

		JPanel ctrls = new JPanel();

		ctrls.add(new JLabel("Setting format on pin: "+(previewOut != null ? previewOut.getName() : captureOut.getName())+"  "));
		ctrls.add(formatSelector);
		ctrls.add(new JLabel(" fps: "));
		ctrls.add(fpsSpinner);
		ctrls.add(fd);
		//f.add(java.awt.BorderLayout.NORTH, ctrls);

		final JCheckBox continous = new JCheckBox("grab continously");

		final JButton gi = new JButton("拍照");

		gi.addActionListener(new java.awt.event.ActionListener() {

			public void actionPerformed(java.awt.event.ActionEvent e) {

				/**
				The grabbed frame will always be 24bit BGR unless the graph is built with the YUV flag set.
				In that case getImage() will return null, but YUV data is available via getData(). Requires
				a YUV source of course.
				**/

				java.awt.image.BufferedImage bi = graph.getImage();

				final JLabel imgLabel = new JLabel(new ImageIcon(bi));

				//final java.awt.image.WritableRaster raster = bi.getWritableTile(bi.getWidth(), bi.getHeight());

				//byte[] imgData = ((java.awt.image.DataBufferByte)raster.getDataBuffer()).getData();

				// Close a previous frame window if one is open (was a
				// try/catch on NullPointerException before).
				if (imageFrame != null) imageFrame.dispose();

				imageFrame = new JFrame("dsj captured frame");

				imageFrame.add(java.awt.BorderLayout.CENTER, imgLabel);

				JTextArea ta = new JTextArea(bi.toString(), 5, 1);

				ta.setLineWrap(true);

				imageFrame.add(java.awt.BorderLayout.SOUTH, ta);

				imageFrame.setPreferredSize(new java.awt.Dimension(bi.getWidth()+100, bi.getHeight()+200));

				imageFrame.pack();

				imageFrame.setLocation(600, 200);

				imageFrame.setVisible(true);

				if (!continous.isSelected()) return;

				// Continuous mode: keep grabbing ~25 fps while the frame
				// window is open. NOTE(review): this assumes dsj refreshes the
				// BufferedImage instance already shown by imgLabel on each
				// getImage() call - confirm against the dsj docs.
				Runnable r = new Runnable() {

					public void run(){

						while(imageFrame.isVisible()) {

							try{

								graph.getImage();

								// Swing components must only be touched on the
								// EDT, and repaint() (not updateUI(), which is
								// for look&feel changes) is the right call.
								SwingUtilities.invokeLater(new Runnable() {
									public void run(){ imgLabel.repaint(); }
								});

								Thread.sleep(40);

							} catch (InterruptedException ie){

								// Restore interrupt status and stop grabbing.
								Thread.currentThread().interrupt();

								return;

							} catch (Exception ex){ System.out.println(ex.toString()); }

						}

					}

				};

				new Thread(r).start();

			}

		});

		final JPanel lowerCtrls = new JPanel(new java.awt.GridLayout(2,3));

		lowerCtrls.setBorder(new javax.swing.border.EmptyBorder(5,5,5,5));

		((java.awt.GridLayout)(lowerCtrls.getLayout())).setVgap(5);

		((java.awt.GridLayout)(lowerCtrls.getLayout())).setHgap(5);

		final JPanel cropCoords = new JPanel(new java.awt.GridLayout(1,4));

		// x, y, width, height - initialized to the full media dimension.
		final int[] coords = new int[]{0, 0, (int)(graph.getMediaDimension().getWidth()), (int)(graph.getMediaDimension().getHeight())};

		for (int i = 0; i < 4; i++) {

			final JTextField ct = new JTextField(String.valueOf(coords[i]));

			ct.setPreferredSize(new java.awt.Dimension(30, 22));

			cropCoords.add(ct);

		}

		final JButton crop = new JButton("crop/size image");

		crop.addActionListener(new java.awt.event.ActionListener() {

			public void actionPerformed(java.awt.event.ActionEvent e) {

				for (int i = 0; i < 4; i++) coords[i] = Integer.parseInt(((JTextField)cropCoords.getComponent(i)).getText().trim());

				changingFormat = true;

				graph.getActiveVideoDevice().setCropSize(activeOut, coords[0], coords[1], coords[2], coords[3]);

			}

		});

		JPanel fl = new JPanel(new java.awt.GridLayout(1,3));

		final JCheckBox hor = new JCheckBox("左反转");

		final JCheckBox ver = new JCheckBox("上反转");

		final JButton flip = new JButton("反转");

		flip.addActionListener(new java.awt.event.ActionListener() {

			public void actionPerformed(java.awt.event.ActionEvent e) {

				boolean flipGrabbedImage = true;

				// Bit flags: 1 = vertical, 2 = horizontal, 4 = also flip grabbed images.
				int fli = (hor.isSelected() ? 2 : 0) | (ver.isSelected() ? 1 : 0) | (flipGrabbedImage ? 4 : 0);

				graph.flipImage(fli);

			}

		});

		fl.add(hor);

		fl.add(ver);

		fl.add(flip);

		final javax.swing.JComboBox tvf = new javax.swing.JComboBox(tvFormatStr);

		try{

			final int[] vdi = ((DSCapture)graph).getActiveVideoDevice().getVDigSettings();

			// The current TV standard is a single set bit in vdi[0];
			// find its position to select the matching combo entry.
			int fIdx = 0;

			while ((vdi[0] >>= 1) != 0) { fIdx++; }

			tvf.setSelectedIndex(fIdx+1);

			tvf.addActionListener(new java.awt.event.ActionListener() {

				public void actionPerformed(java.awt.event.ActionEvent e) {

					// vdi[2] holds the bitmask of standards the device supports.
					if ((vdi[2] & (1 << (tvf.getSelectedIndex() - 1))) != 0) graph.getActiveVideoDevice().configureVDig((1 << (tvf.getSelectedIndex() - 1)), 3);

					else System.out.println("not supported");

				}

			});

		} catch (Exception e){

			System.out.println("Can't get / set TVFormat with this device: "+e.toString());

			tvf.setEnabled(false);

		}

		//lowerCtrls.add(cropCoords);

		lowerCtrls.add(fl);

		lowerCtrls.add(gi);

		//lowerCtrls.add(crop);

		//lowerCtrls.add(tvf);

		//lowerCtrls.add(continous);

		f.add(java.awt.BorderLayout.SOUTH, lowerCtrls);

		f.pack();

		f.setVisible(true);

		/**
		Don't do this at home. This demo relies on dsj closing and disposing off filtergraphs when the JVM exits. This is
		OK for a "open graph, do something & exit" style demo, but real world applications should take care of calling
		dispose() on filtergraphs they're done with themselves.
		**/

		f.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);

	}

	/**
	 * Resyncs the format selector and fps spinner after the graph reports a
	 * FORMAT_CHANGED event, then clears the changingFormat guard.
	 */
	public void propertyChange(java.beans.PropertyChangeEvent pe) {

		if (DSJUtils.getEventType(pe) == DSFiltergraph.FORMAT_CHANGED) {

			// Re-add the video component - its size may have changed.
			f.add("Center", graph.asComponent());

			f.pack();

			int sf = vDev.getSelectedFormat(previewOut != null ? previewOut : captureOut);

			formatSelector.setSelectedIndex(sf);

			DSFilterInfo.DSMediaFormat dsmf = (DSFilterInfo.DSMediaFormat)(mf[sf]);

			SpinnerNumberModel model = (SpinnerNumberModel)(fpsSpinner.getModel());

			model.setMinimum((int)(dsmf.getFrameRateRange()[0])-5);

			model.setMaximum((int)(dsmf.getFrameRateRange()[1])+5);

			fpsSpinner.setModel(model);

			try{

				float currentRate = previewOut != null ? vDev.getFrameRate(previewOut) : vDev.getFrameRate(captureOut);

				fpsSpinner.setValue(Integer.valueOf((int)currentRate));

			} catch (Exception e){e.printStackTrace();}

			changingFormat = false;

		}

	}

	public static void main(String[] args){

		new MyCaptureFormats().createGraph();

	}

}
 
2 red5-client
    red5提供了2个jar ,一个是server.一个是client 用来连接red5服务器。
实例【捕获red5 demo publisher 的视频流】
首先打开red5 ->demo->publisher 


 
点击connect,打开video页签,选择摄像头,点击start,最后点击publish


 
【使用代码捕获】使用上图中的streamName
package com.oatos.red5;

import java.nio.ByteBuffer;
import java.util.Map;

import org.apache.mina.core.buffer.IoBuffer;
import org.red5.client.net.rtmp.ClientExceptionHandler;
import org.red5.client.net.rtmp.RTMPClient;
import org.red5.io.ITag;
import org.red5.io.ITagWriter;
import org.red5.io.flv.impl.FLVWriter;
import org.red5.io.flv.impl.Tag;
import org.red5.io.utils.ObjectMap;
import org.red5.server.api.event.IEvent;
import org.red5.server.api.event.IEventDispatcher;
import org.red5.server.api.event.IEventListener;
import org.red5.server.api.service.IPendingServiceCall;
import org.red5.server.api.service.IPendingServiceCallback;
import org.red5.server.net.rtmp.Channel;
import org.red5.server.net.rtmp.RTMPConnection;
import org.red5.server.net.rtmp.codec.RTMP;
import org.red5.server.net.rtmp.event.AudioData;
import org.red5.server.net.rtmp.event.IRTMPEvent;
import org.red5.server.net.rtmp.event.Notify;
import org.red5.server.net.rtmp.event.Ping;
import org.red5.server.net.rtmp.event.VideoData;
import org.red5.server.net.rtmp.message.Header;
import org.red5.server.net.rtmp.status.StatusCodes;
import org.red5.server.stream.IStreamData;

/**
 * Minimal red5 RTMP client that connects to a red5 application, plays a
 * named stream (live or vod) and dumps the received audio/video packets to
 * stdout. Exits once the server reports NetStream.Play.Stop or the
 * connection is rejected.
 */
public class ClientTest extends RTMPClient {

	/** red5 server host. */
	private String server = "localhost";

	/** Default RTMP port. */
	private int port = 1935;

	/** red5 application to connect to. */
	private String application = "oflaDemo";
	// private String application = "live";

	// private String filename = "prometheus.flv";
	// private String filename = "NAPNAP.flv";
	// private String filename = "cameraFeed";
	/** Stream name to play - for a live stream this is the publisher's streamName. */
	private String filename = "stream1371547539364";

	// live stream (true) or vod stream (false)
	private boolean live = true;

	/** Set under the ClientTest.class lock; main() waits on it until true. */
	private static boolean finished = false;

	public static void main(String[] args) throws InterruptedException {

		final ClientTest player = new ClientTest();
		// decide whether or not the source is live or vod
		player.setLive(true);
		// connect
		player.connect();

		// Block until either the play stops or the connect is rejected.
		synchronized (ClientTest.class) {
			if (!finished)
				ClientTest.class.wait();
		}

		System.out.println("Ended");
	}

	/** Installs the exception handler and stream dispatcher, then connects. */
	public void connect() {
		setExceptionHandler(new ClientExceptionHandler() {
			@Override
			public void handleException(Throwable throwable) {
				throwable.printStackTrace();
			}
		});
		setStreamEventDispatcher(streamEventDispatcher);
		connect(server, port, application, connectCallback);
	}

	/**
	 * Receives every stream event; converts audio/video packets to FLV tags
	 * and prints their buffer info (the FLVWriter lines show how they could
	 * be persisted instead).
	 */
	private IEventDispatcher streamEventDispatcher = new IEventDispatcher() {
		@SuppressWarnings("unchecked")
		public void dispatchEvent(IEvent event) {
			System.out.println("ClientStream.dispachEvent()" + event.toString());
			if (!(event instanceof IRTMPEvent)) {
				//logger.debug("skipping non rtmp event: " + event);
				return;
			}
			IRTMPEvent rtmpEvent = (IRTMPEvent) event;
			/*
			 * if (log.isDebugEnabled()) { log.debug("rtmp event: " +
			 * rtmpEvent.getHeader() + ", " +
			 * rtmpEvent.getClass().getSimpleName()); }
			 */
			if (!(rtmpEvent instanceof IStreamData)) {
				//log.debug("skipping non stream data");
				return;
			}
			if (rtmpEvent.getHeader().getSize() == 0) {
				//log.debug("skipping event where size == 0");
				return;
			}
			ITag tag = new Tag();
			tag.setDataType(rtmpEvent.getDataType());
			// Audio and video tags both just carry the event's own timestamp
			// (the original per-type accumulators were reset to 0 each call,
			// so they never accumulated anything).
			if (rtmpEvent instanceof VideoData || rtmpEvent instanceof AudioData) {
				tag.setTimestamp(rtmpEvent.getTimestamp());
			}

			IoBuffer data = ((IStreamData) rtmpEvent).getData()
					.asReadOnlyBuffer();
			tag.setBodySize(data.limit());
			tag.setBody(data);
			// log.debug(data.toString());
			System.out.println(data.toString());
			// To persist the stream, write the tag out here, e.g.:
			// ITagWriter writer = new FLVWriter(data, true);
			// writer.writeTag(tag);
		}
	};

	/**
	 * Handles the NetConnection result: creates the play stream on success,
	 * wakes up main() on rejection.
	 */
	private IPendingServiceCallback connectCallback = new IPendingServiceCallback() {
		public void resultReceived(IPendingServiceCall call) {
			System.out.println("connectCallback");
			ObjectMap<?, ?> map = (ObjectMap<?, ?>) call.getResult();
			String code = (String) map.get("code");
			if ("NetConnection.Connect.Rejected".equals(code)) {
				System.out.printf("Rejected: %s\n", map.get("description"));
				disconnect();
				synchronized (ClientTest.class) {
					finished = true;
					ClientTest.class.notifyAll();
				}
			} else if ("NetConnection.Connect.Success".equals(code)) {
				createStream(createStreamCallback);
			} else {
				System.out.printf("Unhandled response code: %s\n", code);
			}
		}
	};

	/** Starts playback on the newly created stream with a suitable client buffer. */
	private IPendingServiceCallback createStreamCallback = new IPendingServiceCallback() {
		public void resultReceived(IPendingServiceCall call) {
			int streamId = (Integer) call.getResult();
			// live buffer 0.5s / vod buffer 4s
			if (live) {
				conn.ping(new Ping(Ping.CLIENT_BUFFER, streamId, 500));
				// -1/-1 = play a live stream from "now"
				play(streamId, filename, -1, -1);
			} else {
				conn.ping(new Ping(Ping.CLIENT_BUFFER, streamId, 4000));
				// 0/-1 = play a vod stream from the start to the end
				play(streamId, filename, 0, -1);
			}
		}
	};

	/**
	 * Watches server notifications for NetStream.Play.Stop and shuts down
	 * when it arrives.
	 */
	@SuppressWarnings("unchecked")
	protected void onInvoke(RTMPConnection conn, Channel channel,
			Header header, Notify notify, RTMP rtmp) {
		super.onInvoke(conn, channel, header, notify, rtmp);

		System.out.println("onInvoke, header = " + header.toString());
		System.out.println("onInvoke, notify = " + notify.toString());
		System.out.println("onInvoke, rtmp = " + rtmp.toString());

		Object obj = notify.getCall().getArguments().length > 0 ? notify
				.getCall().getArguments()[0] : null;
		if (obj instanceof Map) {
			Map<String, String> map = (Map<String, String>) obj;
			String code = map.get("code");
			if (StatusCodes.NS_PLAY_STOP.equals(code)) {

				synchronized (ClientTest.class) {
					finished = true;
					ClientTest.class.notifyAll();
				}

				disconnect();
				System.out.println("Disconnected");
			}
		}

	}

	/**
	 * @return the live
	 */
	public boolean isLive() {
		return live;
	}

	/**
	 * @param live
	 *            the live to set
	 */
	public void setLive(boolean live) {
		this.live = live;
	}

}
 
3 dsj 中显示捕获的视频流||||||dsj调用本地摄像头向red5 发送视频流
package com.oatos.red5;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;

import javax.swing.JFrame;

import de.humatic.dsj.DSCapture;
import de.humatic.dsj.DSFilterInfo;
import de.humatic.dsj.DSFiltergraph;
import de.humatic.dsj.DSGraph;
import de.humatic.dsj.DSJUtils;
import de.humatic.dsj.DSMovie;
import de.humatic.dsj.JSampleBuffer;
import de.humatic.dsj.SampleBuffer;
import de.humatic.dsj.SwingMovieController;
import de.humatic.dsj.sink.RTMPSink;
import de.humatic.dsj.src.RTMPSource;
import de.humatic.dsj.src.Source;
import de.humatic.dsj.src.rtmp.ConnectionParameter;
import de.humatic.dsj.src.rtmp.RTMPMessage;
import de.humatic.dsj.src.rtp.RTPChannel;

/**
 * Experimental bridge between dsj (DirectShow for Java) and a red5 server.
 * Two directions are sketched out:
 * TestRTMP()        - capture the local webcam and push it to red5 via RTMPSink;
 * testRTMPSoucre()  - pull a stream back from red5 and try to display it.
 *
 * NOTE(review): the author states the receive path does not render the
 * incoming stream - the data presumably needs a format conversion before
 * dsj can display it; this was never resolved.
 */
public class DSJRTMP implements java.beans.PropertyChangeListener {

	// Local capture filtergraph (video only).
	private DSCapture graph;

	// red5 connection coordinates; url below duplicates ip/port/app in RTMP form.
	private String ip="localhost";
	private int port=1935;
	private String app="oflaDemo";

	private String url = "rtmp://localhost/oflaDemo";

	// Name of the stream published by the red5 demo publisher.
	private String streamName = "stream1371608283848";

	public static void main(String[] args) throws Exception {
		//new DSJRTMP().TestRTMP();
		new DSJRTMP().testRTMPSoucre();

	}
	
	// Creates and shows an empty frame that callers fill with a video component.
	public JFrame addFrame(){
		
		JFrame f = new JFrame("dsj RTMP");
		f.pack();
		f.setVisible(true);
		f.setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
		
		return f;
	}
	
	
	/***
	 * Pushes the local webcam's video stream to the red5 server
	 * (publish direction). Shows a local preview window as well.
	 * @throws Exception on any capture or connection failure
	 */
	public void TestRTMP() throws Exception {
		Source source = new Source();
		// source.createGraph(arg0);

		//javax.swing.JFrame f = new javax.swing.JFrame("dsj SimpleCapture");

		DSFilterInfo[][] dsi = DSCapture.queryDevices();
		
		System.out.println(dsi);

		/** this sample only uses video **/

		graph = new DSCapture(DSFiltergraph.DD7, dsi[0][0], false, DSFilterInfo
				.doNotRender(), this);
		
	//	graph.createBufferStrategy(1024);
		
		System.out.println(graph);
		
		addFrame().add(java.awt.BorderLayout.CENTER, graph.asComponent());
		// f.add(java.awt.BorderLayout.SOUTH, new SwingMovieController(graph));

		// Attach an RTMP sink to the capture graph so frames are published to
		// red5 under streamName.
		// NOTE(review): dsi[1][0] is presumably the audio device - confirm
		// against the dsj RTMPSink javadoc.
		 RTMPSink rtmpsink = new RTMPSink(graph,url,streamName,dsi[0][0],dsi[1][0],-1);

		 System.out.println(rtmpsink);

		// rtmpsink.sendRTMP(arg0);

		// DsSampleBuffer sb = new JSampleBuffer();
		// rtmpsink.sampleReceived();

		// RTPChannel channel = new RTPChannel();

		// RTMPMessage msg = new RTMPMessage();
//
//		f.pack();
//		f.setVisible(true);
//		f.setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
	}
	
	/**
	 * Connects to red5 and tries to receive/display a video stream
	 * (play direction). Known broken - see class comment.
	 * @throws Exception on any connection failure
	 */
	void testRTMPSoucre() throws Exception{
		//ConnectionParameter params = new ConnectionParameter("", nested, AMF.OBJECT, ConnectionParameter.APPEND);
		
		java.beans.PropertyChangeListener pc = new java.beans.PropertyChangeListener(){

			@Override
			public void propertyChange(PropertyChangeEvent evt) {
				System.out.println("----"+evt);
				
			}};
	//	RTMPSource rs  = new RTMPSource(ip,port,app,streamName,pc);
		//RTMPSource rs  = new RTMPSource(url,"hobbit_vp6.flv", -1, this);
		RTMPSource rs  = new RTMPSource(url,streamName, -1, this);
		System.out.println(rs.getPath());
		System.out.println(rs.getMaxTimeLoaded());
		System.out.println(rs.getMediaTypes());
		
		//DSGraph ds = rs.createGraph(DSFiltergraph. VIDEO_BUFFER_REQUEST);
		//System.out.println(rs.getBuffered());
		//System.out.println(ds);
		
		
		// Second attempt: open the RTMP url directly as a DSMovie.
		DSMovie dm = new DSMovie("rtmp://localhost/oflaDemo",DSFiltergraph.DD7,this);
		//DSMovie dm = new DSMovie("F:/install_soft/red5/webapps/oflaDemo/streams/hobbit_vp6.flv",DSFiltergraph.DD7,this);
		
		//addFrame().add(java.awt.BorderLayout.CENTER,dm.asComponent());
	//	JFrame f = new JFrame("dsj RTMP");
		
		addFrame().add(java.awt.BorderLayout.CENTER, dm.asComponent());
		
		//f.pack();
	//	f.setVisible(true);
		//f.setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
	}

	// Graph events are received but intentionally ignored in this experiment.
	@Override
	public void propertyChange(PropertyChangeEvent evt) {
		switch (DSJUtils.getEventType(evt)) {

		}
	}

}
这个类是有问题的,接收到的流无法显示,可能需要转化成某种格式。
 
由于是匆匆整理,省略了red5 的安装,red5跟tomcat 一样使用起来简单,openmeetings 中的屏幕共享代码我是copy他的做了一些修改,但是怎么用java将它显示出来?不知道。要是用flex 简直 so easy。明天整理完再写上完整版的,希望本坛神人能help me!不然心里有个坎,过不去!!
  • 大小: 42.9 KB
  • 大小: 33.5 KB
  • 大小: 10.8 KB
   发表时间:2013-06-21  
项目上也用过RED5跟你的情况一样,做了一半失败了,可以说技术不够
0 请登录后投票
   发表时间:2013-08-29  
感觉用dsj的人不多,具体还没研究过
0 请登录后投票
论坛首页 Java企业应用版

跳转论坛:
Global site tag (gtag.js) - Google Analytics