I am trying to write a function that overlays an image, with transparency, at a rectangle on top of another image. However, it doesn't layer the images; it just erases the section that I overlay, and the transparency cuts through the entire image. Here is my code:
public static void overlayImage(String imagePath, String overlayPath, int x, int y, int width, int height) {
Mat overlay = Imgcodecs.imread(overlayPath, Imgcodecs.IMREAD_UNCHANGED);
Mat image = Imgcodecs.imread(imagePath, Imgcodecs.IMREAD_UNCHANGED);
Rectangle rect = new Rectangle(x, y, width, height);
Imgproc.resize(overlay, overlay, rect.size());
Mat submat = image.submat(new Rect(rect.x, rect.y, overlay.cols(), overlay.rows()));
overlay.copyTo(submat);
Imgcodecs.imwrite(imagePath, image);
}
EDIT: Here are some example pictures (before and after).
I found this function, which does exactly what I needed:
public static void overlayImage(Mat background,Mat foreground,Mat output, Point location){
background.copyTo(output);
for(int y = (int) Math.max(location.y , 0); y < background.rows(); ++y){
int fY = (int) (y - location.y);
if(fY >= foreground.rows())
break;
for(int x = (int) Math.max(location.x, 0); x < background.cols(); ++x){
int fX = (int) (x - location.x);
if(fX >= foreground.cols()){
break;
}
double opacity;
double[] finalPixelValue = new double[4];
opacity = foreground.get(fY , fX)[3];
finalPixelValue[0] = background.get(y, x)[0];
finalPixelValue[1] = background.get(y, x)[1];
finalPixelValue[2] = background.get(y, x)[2];
finalPixelValue[3] = background.get(y, x)[3];
for(int c = 0; c < output.channels(); ++c){
if(opacity > 0){
double foregroundPx = foreground.get(fY, fX)[c];
double backgroundPx = background.get(y, x)[c];
float fOpacity = (float) (opacity / 255);
finalPixelValue[c] = ((backgroundPx * ( 1.0 - fOpacity)) + (foregroundPx * fOpacity));
if(c==3){
finalPixelValue[c] = foreground.get(fY,fX)[3];
}
}
}
output.put(y, x,finalPixelValue);
}
}
}
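For completeness, here is a minimal usage sketch (the file names, size and position are placeholders); it assumes both images are loaded with IMREAD_UNCHANGED so they keep their alpha channel, since the blend reads channel 3 of both Mats:
// Hypothetical usage of overlayImage(); paths and coordinates are placeholders.
Mat background = Imgcodecs.imread("background.png", Imgcodecs.IMREAD_UNCHANGED);
Mat foreground = Imgcodecs.imread("overlay.png", Imgcodecs.IMREAD_UNCHANGED);
// Resize the overlay to the target rectangle before blending.
Imgproc.resize(foreground, foreground, new Size(200, 100));
Mat output = new Mat();
overlayImage(background, foreground, output, new Point(50, 75));
Imgcodecs.imwrite("result.png", output);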
/**
* The purpose of this program is to make an image and turn it into a kaleidoscope
*
* @author (Danny Meijo)
* @version (07/27/2017)
*/
import java.awt.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Ellipse2D;
public class KaleidoscopeImage
{
private Picture canvas = null;
private Picture canvas2 = null;
private Picture pictureObj = null;
private Picture scaledPicture = null;
private Picture clippedPicture = null;
private Graphics g = null;
private Graphics g2 = null;
private Graphics gV2 = null;
private Graphics g2V2 = null;
private Graphics gV3 = null;
private Graphics g2V3 = null;
KaleidoscopeImage(Picture Canvas, Picture image, Picture Canvas2)
{
canvas = Canvas;
canvas2 = Canvas2;
pictureObj = image;
g = canvas.getGraphics();
g2 = (Graphics2D)g;
}
public Picture firstPanel()
{
g2.drawImage(pictureObj.getImage(), 0, canvas.getHeight() / 2, null);
Pixel bottomLeftPixel = null;
Pixel topRightPixel = null;
Color sourceColor1 = null;
for(int ty = 0, by = canvas.getHeight(); ty < canvas.getHeight() / 2; ty++, by--)
{
for(int lx = 0, rx = canvas.getWidth(); lx < canvas.getWidth() / 2; lx++, rx--)
{
bottomLeftPixel = canvas.getPixel(lx, by - 1);
sourceColor1 = bottomLeftPixel.getColor();
topRightPixel = canvas.getPixel(rx - 1, ty);
topRightPixel.setColor(sourceColor1);
}
}
Pixel sourcePixel = null;
Pixel targetPixel = null;
Color sourceColor2 = null;
Color targetColor = null;
for(int y = 0; y < canvas.getHeight() / 2; y++)
{
for(int lx = 0, rx = canvas.getWidth(); lx < canvas.getWidth() / 2; lx++, rx--)
{
sourcePixel = canvas.getPixel(rx - 1,y);
sourceColor2 = sourcePixel.getColor();
targetPixel = canvas2.getPixel(lx,y);
targetPixel.setColor(sourceColor2);
}
}
return canvas2;
}
public Picture secondPanel()
{
Pixel leftPixel = null;
Pixel rightPixel = null;
Color sourceColor = null;
for(int y = 0; y < canvas2.getHeight() / 2; y++)
{
for(int lx = 0, rx = canvas2.getWidth(); lx < canvas2.getWidth() / 2; lx++, rx--)
{
leftPixel = canvas2.getPixel(lx,y);
sourceColor = leftPixel.getColor();
rightPixel = canvas2.getPixel(rx - 1, y);
rightPixel.setColor(sourceColor);
}
}
return canvas2;
}
public Picture thirdPanel()
{
Pixel topPixel = null;
Pixel bottomPixel = null;
Color sourceColor = null;
for(int lx = 0, rx = canvas2.getWidth(); lx < canvas2.getWidth() / 2; lx++, rx--)
{
for(int ty = 0, by = canvas2.getHeight(); ty < canvas2.getHeight() / 2; ty++, by--)
{
topPixel = canvas2.getPixel(rx - 1, ty);
sourceColor = topPixel.getColor();
bottomPixel = canvas2.getPixel(rx - 1, by - 1);
bottomPixel.setColor(sourceColor);
}
}
return canvas2;
}
public Picture fourthPanel()
{
Pixel leftPixel = null;
Pixel rightPixel = null;
Color sourceColor = null;
for(int lx = 0, rx = canvas2.getWidth(); lx < canvas2.getWidth() / 2; lx++, rx--)
{
for(int ty = 0, by = canvas2.getHeight(); ty < canvas2.getHeight() / 2; ty++, by--)
{
leftPixel = canvas2.getPixel(rx - 1, by - 1);
sourceColor = leftPixel.getColor();
rightPixel = canvas2.getPixel(lx, by - 1);
rightPixel.setColor(sourceColor);
}
}
return canvas2;
}
public Picture scalePicture(double xFactor, double yFactor)
{
AffineTransform scaleTransform = new AffineTransform();
scaleTransform.scale(xFactor, yFactor);
scaledPicture = new Picture((int)(canvas2.getWidth() * xFactor), (int)(canvas2.getHeight() * yFactor));
gV2 = scaledPicture.getGraphics();
g2V2 = (Graphics2D)gV2;
g2V2.drawImage(canvas2.getImage(), scaleTransform, null);
return scaledPicture;
}
public Picture clipPicture(Color color)
{
Picture canvas3 = new Picture(canvas2.getWidth(), canvas2.getHeight());
Pixel sourcePixel = null;
Pixel targetPixel = null;
Color sourceColor = null;
Color targetColor = null;
for(int y = 0; y < canvas2.getHeight(); y++)
{
for(int x = 0; x < canvas.getWidth(); x++)
{
sourcePixel = canvas2.getPixel(x,y);
sourceColor = sourcePixel.getColor();
targetPixel = canvas3.getPixel(x,y);
targetPixel.setColor(sourceColor);
}
}
gV3 = canvas3.getGraphics();
g2V3 = (Graphics2D)gV3;
canvas3.setAllPixelsToAColor(color);
Ellipse2D.Double clip = new Ellipse2D.Double(0,0, canvas3.getHeight(), canvas3.getWidth());
g2V3.setClip(clip);
g2V3.drawImage(canvas2.getImage(), 0, 0, canvas3.getHeight(), canvas3.getWidth(), null);
return canvas3;
}
}
Sorry, this is my first post, and I am also very new to Java, since I'm learning it over the summer. I was not sure how to cut the code down to just the parts that I need, but the problem I'm having is in the scalePicture method. I was copying what I saw in a demo program to scale the image down to 0.75 x 0.75. But in my program there is an error with the drawImage method, whereas the demo program had no error.
If you are curious, this is the demo that I was copying:
import java.awt.geom.AffineTransform;
import java.awt.Graphics;
import java.awt.Graphics2D;
class ScalingDemo
{
private Picture originalPicture = null;
private Picture newPicture = null;
private Graphics g = null;
private Graphics2D g2 = null;
ScalingDemo(Picture pic)
{
originalPicture = pic;
}
public Picture scalePicture(double xFactor, double yFactor)
{
AffineTransform scaleTransform = new AffineTransform();
scaleTransform.scale(xFactor, yFactor);
newPicture = new Picture((int)(originalPicture.getWidth()*xFactor), (int)(originalPicture.getHeight()*yFactor));
g = newPicture.getGraphics();
g2 = (Graphics2D)g;
g2.drawImage(originalPicture.getImage(), scaleTransform, null);
return newPicture;
}
}
Looks like the error is at this line:
g2V2.drawImage(canvas2.getImage(), scaleTransform, null); - there's no such method in the java.awt.Graphics class.
That overload, drawImage(Image img, AffineTransform xform, ImageObserver obs), is declared on Graphics2D. This is why the demo compiles: its field g2 is declared as Graphics2D, while your g2V2 is declared as Graphics.
You should either declare the field as Graphics2D, or use a method with another signature:
drawImage(Image img, int x, int y, ImageObserver observer) - see here
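For illustration, a minimal sketch (not your original code) of scalePicture with the field declared as Graphics2D, as the demo does, so the AffineTransform overload is visible at compile time:
private Graphics2D g2V2 = null;

public Picture scalePicture(double xFactor, double yFactor)
{
    AffineTransform scaleTransform = new AffineTransform();
    scaleTransform.scale(xFactor, yFactor);
    scaledPicture = new Picture((int)(canvas2.getWidth() * xFactor),
                                (int)(canvas2.getHeight() * yFactor));
    // Cast once and keep the Graphics2D static type so drawImage(Image, AffineTransform, ImageObserver) resolves.
    g2V2 = (Graphics2D) scaledPicture.getGraphics();
    g2V2.drawImage(canvas2.getImage(), scaleTransform, null);
    return scaledPicture;
}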
These days I am working on a rotating wheel like the attached screenshot.
I've found code on GitHub: https://github.com/R4md4c/AndroidRotaryWheelView
The difficulty is that I am not able to keep the icons vertical while the wheel rotates. The icons rotate along with their wheel segment, whereas they need to stay in the vertical position in which they are initially displayed (as per screenshot 1).
I have to customize the following code, where I need to change the bounds of each drawable's rect.
I have not been able to work out the exact calculation from the reference code.
Any help would be highly appreciated.
@Override
public void onDraw(Canvas canvas) {
// set the height of Wheel childs items
canvas.scale(getWidth() / mViewRect.width(), getHeight() / 2
/ mViewRect.width(), xPosition, yPosition);
canvas.save(Canvas.MATRIX_SAVE_FLAG); // Saving the canvas and later
// restoring it so only this
// image will be rotated.
canvas.rotate((float) mRotationAngle, xPosition, yPosition);
for (int i = 0; i < mWedges.length; i++) {
Wedge f = mWedges[i];
mPaint.setColor(SEGMENT_COLOR);
mPaint.setStyle(Paint.Style.FILL_AND_STROKE);
canvas.drawPath(f, mPaint);
Rect rf = iconRect[i];
if ((mMenuEntries.get(i).getIcon() != 0)
&& (mMenuEntries.get(i).getLabel() != null)) {
System.out.println("the canvasd drawn ........");
// This will look for a "new line" and split into multiple lines
String menuItemName = mMenuEntries.get(i).getLabel();
String[] stringArray = menuItemName.split("\n");
mPaint.setColor(textColor);
mPaint.setStyle(Paint.Style.FILL);
// mPaint.setTextSize(textSize);
Rect rect = new Rect();
float textHeight = 0;
for (int j = 0; j < stringArray.length; j++) {
mPaint.getTextBounds(stringArray[j], 0,
stringArray[j].length(), rect);
textHeight = textHeight + (rect.height() + 3);
}
Rect rf2 = new Rect();
rf2.set(rf.left, rf.top - ((int) textHeight / 2), rf.right,
rf.bottom - ((int) textHeight / 2));
float textBottom = rf2.bottom;
for (int j = 0; j < stringArray.length; j++) {
mPaint.getTextBounds(stringArray[j], 0,
stringArray[j].length(), rect);
float textLeft = rf.centerX() - rect.width() / 2;
textBottom = textBottom + (rect.height() + 3);
mPaint.setTextSize(scalePX(8));
canvas.drawText(stringArray[j], textLeft - rect.left,
textBottom - rect.bottom, mPaint);
}
// canvas.rotate((float)mRotationAngle,
// rf.top-((int)textHeight/2), rf.bottom-((int)textHeight/2));
int index = checkSelection(canvas);
rf2 = rotaionRf(rf2, mRotationAngle);
if (i == index) {
// Puts in the Icon
Drawable drawable = getResources().getDrawable(
mMenuEntries.get(i).getIconSelected());
drawable.setBounds(rf2);
drawable.draw(canvas);
} else {
// Puts in the Icon
Drawable drawable = getResources().getDrawable(
mMenuEntries.get(i).getIcon());
drawable.setBounds(rf2);
drawable.draw(canvas);
}
// Icon Only
} else if (mMenuEntries.get(i).getIcon() != 0) {
System.out.println("the canvasd drawn ELSE........");
// Puts in the Icon
Drawable drawable = getResources().getDrawable(
mMenuEntries.get(i).getIconSelected());
drawable.setBounds(rf);
drawable.draw(canvas);
// Text Only
} else {
// Puts in the Text if no Icon
mPaint.setColor(this.textColor);
/*
* if (f != enabled && Wedge2Shown == true) {
* mPaint.setAlpha(disabledAlpha); } else {
* mPaint.setAlpha(textAlpha); }
*/
// mPaint.setAlpha(textAlpha);
mPaint.setStyle(Paint.Style.FILL);
// mPaint.setTextSize(textSize);
// This will look for a "new line" and split into multiple lines
String menuItemName = mMenuEntries.get(i).getLabel();
String[] stringArray = menuItemName.split("\n");
// gets total height
Rect rect = new Rect();
float textHeight = 0;
for (int j = 0; j < stringArray.length; j++) {
mPaint.getTextBounds(stringArray[j], 0,
stringArray[j].length(), rect);
textHeight = textHeight + (rect.height() + 3);
}
float textBottom = rf.centerY() - (textHeight / 2);
for (int j = 0; j < stringArray.length; j++) {
mPaint.getTextBounds(stringArray[j], 0,
stringArray[j].length(), rect);
float textLeft = rf.centerX() - rect.width() / 2;
textBottom = textBottom + (rect.height() + 3);
canvas.drawText(stringArray[j], textLeft - rect.left,
textBottom - rect.bottom, mPaint);
}
// canvas.drawTextOnPath(text, path, hOffset, vOffset, paint)
// canvas.rotate((float)mRotationAngle, xPosition, yPosition );
// canvas.drawRect(rf, mPaint);
}
// canvas.restore();
}
// canvas.restore();
canvas.restore();
// System.out.println()
canvas.save();
canvas.restore();
mPaint.setShader(mShader);
// mPaint.setAlpha(0x66);
// Draw the Selection Segment
if (mSelectionWedge != null) {
canvas.drawPath(mSelectionWedge, mPaint);
// canvas.drawRect(mSelectionWedge.getWedgeRegion().getBounds(),
// mPaint);
}
mPaint.setShader(null);
int index = checkSelection(canvas);
System.out.println("the index=====" + index);
if (checkSelection(canvas) != -1) {
}
}
When using canvas.rotate(), you have to make sure the x and y values you pass to it are the center of the MAIN wheel.
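As an illustration, one common way to keep the icons upright (a hedged sketch, not code from the library) is to counter-rotate the canvas around each icon's own center by the wheel's rotation angle just before drawing it:
// Hypothetical sketch: locally undo the wheel rotation so the icon stays vertical.
canvas.save();
canvas.rotate((float) -mRotationAngle, rf2.centerX(), rf2.centerY());
drawable.setBounds(rf2);
drawable.draw(canvas);
canvas.restore();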
I want to be able to take an animated GIF as input, count the frames (and perhaps other metadata), and convert each to a BufferedImage.
How can I do this?
If you want all the frames to be the same size (for optimized GIFs), try something like this:
try {
String[] imageatt = new String[]{
"imageLeftPosition",
"imageTopPosition",
"imageWidth",
"imageHeight"
};
ImageReader reader = (ImageReader)ImageIO.getImageReadersByFormatName("gif").next();
ImageInputStream ciis = ImageIO.createImageInputStream(new File("house2.gif"));
reader.setInput(ciis, false);
int noi = reader.getNumImages(true);
BufferedImage master = null;
for (int i = 0; i < noi; i++) {
BufferedImage image = reader.read(i);
IIOMetadata metadata = reader.getImageMetadata(i);
Node tree = metadata.getAsTree("javax_imageio_gif_image_1.0");
NodeList children = tree.getChildNodes();
for (int j = 0; j < children.getLength(); j++) {
Node nodeItem = children.item(j);
if(nodeItem.getNodeName().equals("ImageDescriptor")){
Map<String, Integer> imageAttr = new HashMap<String, Integer>();
for (int k = 0; k < imageatt.length; k++) {
NamedNodeMap attr = nodeItem.getAttributes();
Node attnode = attr.getNamedItem(imageatt[k]);
imageAttr.put(imageatt[k], Integer.valueOf(attnode.getNodeValue()));
}
if(i==0){
master = new BufferedImage(imageAttr.get("imageWidth"), imageAttr.get("imageHeight"), BufferedImage.TYPE_INT_ARGB);
}
master.getGraphics().drawImage(image, imageAttr.get("imageLeftPosition"), imageAttr.get("imageTopPosition"), null);
}
}
ImageIO.write(master, "GIF", new File( i + ".gif"));
}
} catch (IOException e) {
e.printStackTrace();
}
None of the answers here are correct and suitable for animation. There are many problems in each solution, so I wrote something that actually works with all GIF files. For instance, this takes into account the actual width and height of the image instead of taking the width and height of the first frame and assuming it will fill the entire canvas; unfortunately, it's not that simple. Second, this doesn't leave any transparent pixels. Third, this takes disposal methods into account. Fourth, this gives you the delays between frames (multiply by 10 if you want to use them in Thread.sleep()).
private ImageFrame[] readGif(InputStream stream) throws IOException{
ArrayList<ImageFrame> frames = new ArrayList<ImageFrame>(2);
ImageReader reader = (ImageReader) ImageIO.getImageReadersByFormatName("gif").next();
reader.setInput(ImageIO.createImageInputStream(stream));
int lastx = 0;
int lasty = 0;
int width = -1;
int height = -1;
IIOMetadata metadata = reader.getStreamMetadata();
Color backgroundColor = null;
if(metadata != null) {
IIOMetadataNode globalRoot = (IIOMetadataNode) metadata.getAsTree(metadata.getNativeMetadataFormatName());
NodeList globalColorTable = globalRoot.getElementsByTagName("GlobalColorTable");
NodeList globalScreeDescriptor = globalRoot.getElementsByTagName("LogicalScreenDescriptor");
if (globalScreeDescriptor != null && globalScreeDescriptor.getLength() > 0){
IIOMetadataNode screenDescriptor = (IIOMetadataNode) globalScreeDescriptor.item(0);
if (screenDescriptor != null){
width = Integer.parseInt(screenDescriptor.getAttribute("logicalScreenWidth"));
height = Integer.parseInt(screenDescriptor.getAttribute("logicalScreenHeight"));
}
}
if (globalColorTable != null && globalColorTable.getLength() > 0){
IIOMetadataNode colorTable = (IIOMetadataNode) globalColorTable.item(0);
if (colorTable != null) {
String bgIndex = colorTable.getAttribute("backgroundColorIndex");
IIOMetadataNode colorEntry = (IIOMetadataNode) colorTable.getFirstChild();
while (colorEntry != null) {
if (colorEntry.getAttribute("index").equals(bgIndex)) {
int red = Integer.parseInt(colorEntry.getAttribute("red"));
int green = Integer.parseInt(colorEntry.getAttribute("green"));
int blue = Integer.parseInt(colorEntry.getAttribute("blue"));
backgroundColor = new Color(red, green, blue);
break;
}
colorEntry = (IIOMetadataNode) colorEntry.getNextSibling();
}
}
}
}
BufferedImage master = null;
boolean hasBackround = false;
for (int frameIndex = 0;; frameIndex++) {
BufferedImage image;
try{
image = reader.read(frameIndex);
}catch (IndexOutOfBoundsException io){
break;
}
if (width == -1 || height == -1){
width = image.getWidth();
height = image.getHeight();
}
IIOMetadataNode root = (IIOMetadataNode) reader.getImageMetadata(frameIndex).getAsTree("javax_imageio_gif_image_1.0");
IIOMetadataNode gce = (IIOMetadataNode) root.getElementsByTagName("GraphicControlExtension").item(0);
NodeList children = root.getChildNodes();
int delay = Integer.valueOf(gce.getAttribute("delayTime"));
String disposal = gce.getAttribute("disposalMethod");
if (master == null){
master = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
master.createGraphics().setColor(backgroundColor);
master.createGraphics().fillRect(0, 0, master.getWidth(), master.getHeight());
hasBackround = image.getWidth() == width && image.getHeight() == height;
master.createGraphics().drawImage(image, 0, 0, null);
}else{
int x = 0;
int y = 0;
for (int nodeIndex = 0; nodeIndex < children.getLength(); nodeIndex++){
Node nodeItem = children.item(nodeIndex);
if (nodeItem.getNodeName().equals("ImageDescriptor")){
NamedNodeMap map = nodeItem.getAttributes();
x = Integer.valueOf(map.getNamedItem("imageLeftPosition").getNodeValue());
y = Integer.valueOf(map.getNamedItem("imageTopPosition").getNodeValue());
}
}
if (disposal.equals("restoreToPrevious")){
BufferedImage from = null;
for (int i = frameIndex - 1; i >= 0; i--){
if (!frames.get(i).getDisposal().equals("restoreToPrevious") || frameIndex == 0){
from = frames.get(i).getImage();
break;
}
}
{
ColorModel model = from.getColorModel();
boolean alpha = from.isAlphaPremultiplied();
WritableRaster raster = from.copyData(null);
master = new BufferedImage(model, raster, alpha, null);
}
}else if (disposal.equals("restoreToBackgroundColor") && backgroundColor != null){
if (!hasBackround || frameIndex > 1){
master.createGraphics().fillRect(lastx, lasty, frames.get(frameIndex - 1).getWidth(), frames.get(frameIndex - 1).getHeight());
}
}
master.createGraphics().drawImage(image, x, y, null);
lastx = x;
lasty = y;
}
{
BufferedImage copy;
{
ColorModel model = master.getColorModel();
boolean alpha = master.isAlphaPremultiplied();
WritableRaster raster = master.copyData(null);
copy = new BufferedImage(model, raster, alpha, null);
}
frames.add(new ImageFrame(copy, delay, disposal, image.getWidth(), image.getHeight()));
}
master.flush();
}
reader.dispose();
return frames.toArray(new ImageFrame[frames.size()]);
}
And the ImageFrame class:
import java.awt.image.BufferedImage;
public class ImageFrame {
private final int delay;
private final BufferedImage image;
private final String disposal;
private final int width, height;
public ImageFrame (BufferedImage image, int delay, String disposal, int width, int height){
this.image = image;
this.delay = delay;
this.disposal = disposal;
this.width = width;
this.height = height;
}
public ImageFrame (BufferedImage image){
this.image = image;
this.delay = -1;
this.disposal = null;
this.width = -1;
this.height = -1;
}
public BufferedImage getImage() {
return image;
}
public int getDelay() {
return delay;
}
public String getDisposal() {
return disposal;
}
public int getWidth() {
return width;
}
public int getHeight() {
return height;
}
}
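To make the delay handling concrete, here is a hedged usage sketch (the file path and method name playGif are placeholders) that reads a GIF and steps through its frames:
// Hypothetical usage of readGif(); the caller decides what to do with each frame.
void playGif(File file) throws Exception {
    ImageFrame[] frames = readGif(new FileInputStream(file));
    for (ImageFrame frame : frames) {
        BufferedImage img = frame.getImage();
        // delayTime is in hundredths of a second, hence * 10 to get milliseconds
        Thread.sleep(frame.getDelay() * 10L);
        // ... hand img to whatever displays the animation
    }
}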
Right, I have never done anything even slightly like this before, but a bit of Googling and fiddling in Java got me this:
public ArrayList<BufferedImage> getFrames(File gif) throws IOException{
ArrayList<BufferedImage> frames = new ArrayList<BufferedImage>();
ImageReader ir = new GIFImageReader(new GIFImageReaderSpi());
ir.setInput(ImageIO.createImageInputStream(gif));
for(int i = 0; i < ir.getNumImages(true); i++)
frames.add(ir.getRawImageType(i).createBufferedImage(ir.getWidth(i), ir.getHeight(i)));
return frames;
}
Edit: see Ansel Zandegran's modification to my answer.
To split an animated GIF into separate BufferedImage frames:
try {
ImageReader reader = ImageIO.getImageReadersByFormatName("gif").next();
File input = new File("input.gif");
ImageInputStream stream = ImageIO.createImageInputStream(input);
reader.setInput(stream);
int count = reader.getNumImages(true);
for (int index = 0; index < count; index++) {
BufferedImage frame = reader.read(index);
// Here you go
}
} catch (IOException ex) {
// An I/O problem has occurred
}
Alex's answer covers most cases, but it does have a couple of problems. It doesn't handle transparency correctly (at least according to common convention), and it applies the current frame's disposal method to the previous frame, which is incorrect. Here's a version that does handle those cases correctly:
private ImageFrame[] readGIF(ImageReader reader) throws IOException {
ArrayList<ImageFrame> frames = new ArrayList<ImageFrame>(2);
int width = -1;
int height = -1;
IIOMetadata metadata = reader.getStreamMetadata();
if (metadata != null) {
IIOMetadataNode globalRoot = (IIOMetadataNode) metadata.getAsTree(metadata.getNativeMetadataFormatName());
NodeList globalScreenDescriptor = globalRoot.getElementsByTagName("LogicalScreenDescriptor");
if (globalScreenDescriptor != null && globalScreenDescriptor.getLength() > 0) {
IIOMetadataNode screenDescriptor = (IIOMetadataNode) globalScreenDescriptor.item(0);
if (screenDescriptor != null) {
width = Integer.parseInt(screenDescriptor.getAttribute("logicalScreenWidth"));
height = Integer.parseInt(screenDescriptor.getAttribute("logicalScreenHeight"));
}
}
}
BufferedImage master = null;
Graphics2D masterGraphics = null;
for (int frameIndex = 0;; frameIndex++) {
BufferedImage image;
try {
image = reader.read(frameIndex);
} catch (IndexOutOfBoundsException io) {
break;
}
if (width == -1 || height == -1) {
width = image.getWidth();
height = image.getHeight();
}
IIOMetadataNode root = (IIOMetadataNode) reader.getImageMetadata(frameIndex).getAsTree("javax_imageio_gif_image_1.0");
IIOMetadataNode gce = (IIOMetadataNode) root.getElementsByTagName("GraphicControlExtension").item(0);
int delay = Integer.valueOf(gce.getAttribute("delayTime"));
String disposal = gce.getAttribute("disposalMethod");
int x = 0;
int y = 0;
if (master == null) {
master = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
masterGraphics = master.createGraphics();
masterGraphics.setBackground(new Color(0, 0, 0, 0));
} else {
NodeList children = root.getChildNodes();
for (int nodeIndex = 0; nodeIndex < children.getLength(); nodeIndex++) {
Node nodeItem = children.item(nodeIndex);
if (nodeItem.getNodeName().equals("ImageDescriptor")) {
NamedNodeMap map = nodeItem.getAttributes();
x = Integer.valueOf(map.getNamedItem("imageLeftPosition").getNodeValue());
y = Integer.valueOf(map.getNamedItem("imageTopPosition").getNodeValue());
}
}
}
masterGraphics.drawImage(image, x, y, null);
BufferedImage copy = new BufferedImage(master.getColorModel(), master.copyData(null), master.isAlphaPremultiplied(), null);
frames.add(new ImageFrame(copy, delay, disposal));
if (disposal.equals("restoreToPrevious")) {
BufferedImage from = null;
for (int i = frameIndex - 1; i >= 0; i--) {
if (!frames.get(i).getDisposal().equals("restoreToPrevious") || frameIndex == 0) {
from = frames.get(i).getImage();
break;
}
}
master = new BufferedImage(from.getColorModel(), from.copyData(null), from.isAlphaPremultiplied(), null);
masterGraphics = master.createGraphics();
masterGraphics.setBackground(new Color(0, 0, 0, 0));
} else if (disposal.equals("restoreToBackgroundColor")) {
masterGraphics.clearRect(x, y, image.getWidth(), image.getHeight());
}
}
reader.dispose();
return frames.toArray(new ImageFrame[frames.size()]);
}
private class ImageFrame {
private final int delay;
private final BufferedImage image;
private final String disposal;
public ImageFrame(BufferedImage image, int delay, String disposal) {
this.image = image;
this.delay = delay;
this.disposal = disposal;
}
public BufferedImage getImage() {
return image;
}
public int getDelay() {
return delay;
}
public String getDisposal() {
return disposal;
}
}
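For reference, a small sketch (the file name is a placeholder) of how one might obtain the ImageReader this method expects:
// Hypothetical usage; readGIF() disposes of the reader itself.
ImageReader reader = ImageIO.getImageReadersByFormatName("gif").next();
reader.setInput(ImageIO.createImageInputStream(new File("input.gif")));
ImageFrame[] frames = readGIF(reader);
System.out.println("Decoded " + frames.length + " frames");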
There is a good description of how GIF animations work in this ImageMagick tutorial.
I wrote a GIF image decoder on my own and released it under the Apache License 2.0 on GitHub. You can download it here: https://github.com/DhyanB/Open-Imaging. Example usage:
void example(final byte[] data) throws Exception {
final GifImage gif = GifDecoder.read(data);
final int width = gif.getWidth();
final int height = gif.getHeight();
final int background = gif.getBackgroundColor();
final int frameCount = gif.getFrameCount();
for (int i = 0; i < frameCount; i++) {
final BufferedImage img = gif.getFrame(i);
final int delay = gif.getDelay(i);
ImageIO.write(img, "png", new File(OUTPATH + "frame_" + i + ".png"));
}
}
The decoder supports GIF87a, GIF89a, animation, transparency and interlacing. Frames will have the width and height of the image itself and be placed at the correct position on the canvas. It respects frame transparency and disposal methods. Check out the project description for more details, such as the handling of background colors.
Additionally, the decoder doesn't suffer from this ImageIO bug: ArrayIndexOutOfBoundsException: 4096 while reading gif file.
I'd be happy to get some feedback. I've been testing with a representative set of images; however, some real field testing would be good.
Using c24w's solution, replace:
frames.add(ir.getRawImageType(i).createBufferedImage(ir.getWidth(i), ir.getHeight(i)));
With:
frames.add(ir.read(i));
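Putting the two together, the combined method would look roughly like this (same structure as c24w's answer, with only that one line changed):
public ArrayList<BufferedImage> getFrames(File gif) throws IOException {
    ArrayList<BufferedImage> frames = new ArrayList<BufferedImage>();
    ImageReader ir = new GIFImageReader(new GIFImageReaderSpi());
    ir.setInput(ImageIO.createImageInputStream(gif));
    for (int i = 0; i < ir.getNumImages(true); i++)
        frames.add(ir.read(i)); // decode the actual frame data instead of creating an empty image
    return frames;
}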
I need to be able to capture an image of a GLSurfaceView at a certain moment in time. I have the following code:
relative.setDrawingCacheEnabled(true);
screenshot = Bitmap.createBitmap(relative.getDrawingCache());
relative.setDrawingCacheEnabled(false);
Log.v(TAG, "Screenshot height: " + screenshot.getHeight());
image.setImageBitmap(screenshot);
The GLSurfaceView is contained within a RelativeLayout, but I have also tried using the GLSurfaceView directly to try and capture the image. With this, I think the screen captures a transparent image, i.e. nothing is there. Any help will be appreciated.
SurfaceView and GLSurfaceView punch holes in their windows to allow their surfaces to be displayed. In other words, they have transparent areas.
So you cannot capture an image by calling GLSurfaceView.getDrawingCache().
If you want to get an image from GLSurfaceView, you should invoke gl.glReadPixels() in GLSurfaceView.onDrawFrame().
I patched the createBitmapFromGLSurface method and call it in onDrawFrame().
(The original code might be from skuld's code.)
private Bitmap createBitmapFromGLSurface(int x, int y, int w, int h, GL10 gl)
throws OutOfMemoryError {
int bitmapBuffer[] = new int[w * h];
int bitmapSource[] = new int[w * h];
IntBuffer intBuffer = IntBuffer.wrap(bitmapBuffer);
intBuffer.position(0);
try {
gl.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, intBuffer);
int offset1, offset2;
for (int i = 0; i < h; i++) {
offset1 = i * w;
offset2 = (h - i - 1) * w;
for (int j = 0; j < w; j++) {
int texturePixel = bitmapBuffer[offset1 + j];
int blue = (texturePixel >> 16) & 0xff;
int red = (texturePixel << 16) & 0x00ff0000;
int pixel = (texturePixel & 0xff00ff00) | red | blue;
bitmapSource[offset2 + j] = pixel;
}
}
} catch (GLException e) {
return null;
}
return Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.ARGB_8888);
}
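For example, a minimal sketch (the flag and size fields are placeholders, not part of the original answer) of calling it from onDrawFrame() so the pixels are read on the GL thread:
@Override
public void onDrawFrame(GL10 gl) {
    // ... normal scene drawing ...
    if (captureRequested) {          // hypothetical flag set from the UI thread
        captureRequested = false;
        lastBitmap = createBitmapFromGLSurface(0, 0, surfaceWidth, surfaceHeight, gl);
    }
}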
Here is a complete solution if you are using a third-party library that you just 'pass in' a GLSurfaceView defined in your layout. You won't have a handle on the renderer's onDrawFrame(), which can be a problem...
To do this you need to queue it up for the GLSurfaceView to handle.
private GLSurfaceView glSurfaceView; // findById() in onCreate
private Bitmap snapshotBitmap;
private interface BitmapReadyCallbacks {
void onBitmapReady(Bitmap bitmap);
}
/* Usage code
captureBitmap(new BitmapReadyCallbacks() {
@Override
public void onBitmapReady(Bitmap bitmap) {
someImageView.setImageBitmap(bitmap);
}
});
*/
// supporting methods
private void captureBitmap(final BitmapReadyCallbacks bitmapReadyCallbacks) {
glSurfaceView.queueEvent(new Runnable() {
@Override
public void run() {
EGL10 egl = (EGL10) EGLContext.getEGL();
GL10 gl = (GL10)egl.eglGetCurrentContext().getGL();
snapshotBitmap = createBitmapFromGLSurface(0, 0, glSurfaceView.getWidth(), glSurfaceView.getHeight(), gl);
runOnUiThread(new Runnable() {
@Override
public void run() {
bitmapReadyCallbacks.onBitmapReady(snapshotBitmap);
}
});
}
});
}
// from other answer in this question
private Bitmap createBitmapFromGLSurface(int x, int y, int w, int h, GL10 gl) {
int bitmapBuffer[] = new int[w * h];
int bitmapSource[] = new int[w * h];
IntBuffer intBuffer = IntBuffer.wrap(bitmapBuffer);
intBuffer.position(0);
try {
gl.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, intBuffer);
int offset1, offset2;
for (int i = 0; i < h; i++) {
offset1 = i * w;
offset2 = (h - i - 1) * w;
for (int j = 0; j < w; j++) {
int texturePixel = bitmapBuffer[offset1 + j];
int blue = (texturePixel >> 16) & 0xff;
int red = (texturePixel << 16) & 0x00ff0000;
int pixel = (texturePixel & 0xff00ff00) | red | blue;
bitmapSource[offset2 + j] = pixel;
}
}
} catch (GLException e) {
Log.e(TAG, "createBitmapFromGLSurface: " + e.getMessage(), e);
return null;
}
return Bitmap.createBitmap(bitmapSource, w, h, Config.ARGB_8888);
}
Note: In this code, when I click the button, it takes the screenshot as an image and saves it to an SD card location. I used a boolean flag and an if statement in the onDrawFrame method, because the renderer class may call onDrawFrame at any time, and without the if condition this code could save lots of images to the memory card.
MainActivity class:
protected boolean printOptionEnable = false;
saveImageButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
Log.v("hari", "pan button clicked");
isSaveClick = true;
myRenderer.printOptionEnable = isSaveClick;
}
});
MyRenderer class:
int width_surface , height_surface ;
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
Log.i("JO", "onSurfaceChanged");
// Adjust the viewport based on geometry changes,
// such as screen rotation
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
width_surface = width ;
height_surface = height ;
}
@Override
public void onDrawFrame(GL10 gl) {
try {
if (printOptionEnable) {
printOptionEnable = false ;
Log.i("hari", "printOptionEnable if condition:" + printOptionEnable);
int w = width_surface ;
int h = height_surface ;
Log.i("hari", "w:"+w+"-----h:"+h);
int b[]=new int[(int) (w*h)];
int bt[]=new int[(int) (w*h)];
IntBuffer buffer=IntBuffer.wrap(b);
buffer.position(0);
GLES20.glReadPixels(0, 0, w, h,GLES20.GL_RGBA,GLES20.GL_UNSIGNED_BYTE, buffer);
for(int i=0; i<h; i++)
{
//remember, that OpenGL bitmap is incompatible with Android bitmap
//and so, some correction need.
for(int j=0; j<w; j++)
{
int pix=b[i*w+j];
int pb=(pix>>16)&0xff;
int pr=(pix<<16)&0x00ff0000;
int pix1=(pix&0xff00ff00) | pr | pb;
bt[(h-i-1)*w+j]=pix1;
}
}
Bitmap inBitmap = null ;
if (inBitmap == null || !inBitmap.isMutable()
|| inBitmap.getWidth() != w || inBitmap.getHeight() != h) {
inBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
}
//Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
inBitmap.copyPixelsFromBuffer(buffer);
//return inBitmap ;
// return Bitmap.createBitmap(bt, w, h, Bitmap.Config.ARGB_8888);
inBitmap = Bitmap.createBitmap(bt, w, h, Bitmap.Config.ARGB_8888);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
inBitmap.compress(CompressFormat.JPEG, 90, bos);
byte[] bitmapdata = bos.toByteArray();
ByteArrayInputStream fis = new ByteArrayInputStream(bitmapdata);
final Calendar c=Calendar.getInstance();
long mytimestamp=c.getTimeInMillis();
String timeStamp=String.valueOf(mytimestamp);
String myfile="hari"+timeStamp+".jpeg";
dir_image = new File(Environment.getExternalStorageDirectory()+File.separator+
"printerscreenshots"+File.separator+"image");
dir_image.mkdirs();
try {
File tmpFile = new File(dir_image,myfile);
FileOutputStream fos = new FileOutputStream(tmpFile);
byte[] buf = new byte[1024];
int len;
while ((len = fis.read(buf)) > 0) {
fos.write(buf, 0, len);
}
fis.close();
fos.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
Log.v("hari", "screenshots:"+dir_image.toString());
}
} catch(Exception e) {
e.printStackTrace();
}
}
You can use a GLTextureView extending TextureView instead of GLSurfaceView to show your OpenGL data.
See: http://stackoverflow.com/questions/12061419/converting-from-glsurfaceview-to-textureview-via-gltextureview
As GLTextureView extends TextureView, it has a getBitmap function, which should work:
myGlTextureView.getBitmap(int width, int height)
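For example, a minimal sketch (the output path is a placeholder) of grabbing the bitmap and saving it as a PNG:
// Hypothetical snippet inside an Activity; error handling kept minimal.
try {
    Bitmap bitmap = myGlTextureView.getBitmap(myGlTextureView.getWidth(),
            myGlTextureView.getHeight());
    FileOutputStream out = new FileOutputStream(
            new File(getExternalFilesDir(null), "capture.png"));
    bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
    out.close();
} catch (IOException e) {
    e.printStackTrace();
}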