10.9 C
London
Friday, February 16, 2024

ios – how to get the exact position of text in a video as in the photo view?


I want to provide a photo editing feature in my app, so I'm allowing the user to add text to a photo, and after that I want to convert it to a video using an FFmpeg command. Here is what my screen looks like where the user can edit photos.

Here is my command that adds text and converts the photo to a video.
ffmpeg -loop 1 -i /var/mobile/Containers/Data/Application/88F535C3-A300-456C-97BB-1A9B83EAEE7B/Documents/Compress_Image/input.jpg -filter_complex "[0]scale=1080:trunc(ow/a/2)*2[video0];[video0]drawtext=text="Dyjfyjyrjyfjyfkyfk":fontfile=/private/var/containers/Bundle/Application/DE5C8DAA-4D66-4345-834A-89F8AC19DF9B/Clear Status.app/avenyt.ttf:fontsize=66.55112651646448:fontcolor=#FFFFFF:x=349.92:y=930.051993067591" -c:v libx264 -t 5 -pix_fmt yuv420p -y /var/mobile/Containers/Data/Application/88F535C3-A300-456C-97BB-1A9B83EAEE7B/Documents/Compress_Image/output0.mp4

Here is my Swift code to generate the command.

var filterComplex = ""
var inputs = ""
var audioIndex = ""

if currentPhotoTextDataArray.contains(where: { $0.isLocation }) {
 // At least one element has isLocation set to true
 // Do something here
 print("There is at least one element with isLocation == true")
                            inputs = "-i \(inputPath) -i \(self.locImagePath)"
                            audioIndex = "2"
                            
                        } else {
                            // No elements have isLocation == true
                            print("No elements have isLocation set to true")
                            inputs = "-i \(inputPath)"
                            audioIndex = "1"
                        }
                        
                        for (index, textData) in currentPhotoTextDataArray.enumerated() {
                            print("x: \(textData.xPosition), y: \(textData.yPosition)")
                            let x = (textData.xPosition) * 1080 / self.photoViewWidth
                            let y = (textData.yPosition) * 1920 / self.photoViewHeight
                            
                            let fontSizeForWidth = (textData.fontSize * 1080) / self.photoViewWidth
                            let fontSizeForHeight = (textData.fontSize * 1920) / self.photoViewHeight
                            print("fontSizeForWidth: \(fontSizeForWidth)")
                            print("fontSizeForHeight: \(fontSizeForHeight)")
                            
                            let fontPath = textData.font.fontPath
                            let fontColor = textData.fontColor.toHexOrASS(format: "hex")
                            let backColor = textData.backColor?.toHexOrASS(format: "hex")
                            print("fontPath: \(fontPath)")
                            print("fontColor: \(fontColor)")
                            
                            let breakedText = self.addBreaks(in: textData.text, with: UIFont(name: textData.font.fontName, size: fontSizeForHeight) ?? UIFont(), forWidth: 1080, padding: Int(x))
                            
                            if textData.isLocation {
                                print("Location is there.")
                                
                                let textFont = UIFont(name: textData.font.fontName, size: fontSizeForHeight)
                                let attributes: [NSAttributedString.Key: Any] = [NSAttributedString.Key.font: textFont ?? UIFont()]
                                let size = (textData.text as NSString).size(withAttributes: attributes)
                                let textWidth = Int(size.width) + 130
                                
                                var endTimeLoc = 0.0
                                if let audioData = self.audioDataArray.first(where: { $0.photoIndex == mainIndex }) {
                                    let duration = audioData.audioEndTime - audioData.audioStartTime
                                    endTimeLoc = duration
                                } else {
                                    endTimeLoc = 5
                                }
                                
                                let layerFilter = "color=black@0.5:size=\(textWidth)x130[layer0];[video\(index)][layer0]overlay=enable=\"between(t,0,\(endTimeLoc))\":x=\(x):y=(\(y)-(overlay_h/2))[layer1];"
                                filterComplex += layerFilter
                                let imageFilter = "[1:v]scale=80:80[image];[layer1][image]overlay=enable=\"between(t,0,\(endTimeLoc))\":x=\(x)+10:y=(\(y)-(overlay_h/2))[v\(index)];"
                                filterComplex += imageFilter
                                
                                if index == currentPhotoTextDataArray.count - 1 {
                                    let textFilter = "[v\(index)]drawtext=text=\"\(breakedText)\":fontfile=\(fontPath):fontsize=\(fontSizeForHeight):fontcolor=\(fontColor):x=(\(x)+100):y=(\(y)-(text_h/2))"
                                    filterComplex += textFilter
                                } else {
                                    let textFilter = "[v\(index)]drawtext=text=\"\(breakedText)\":fontfile=\(fontPath):fontsize=\(fontSizeForHeight):fontcolor=\(fontColor):x=(\(x)+100):y=(\(y)-(text_h/2))[video\(index + 1)];"
                                    filterComplex += textFilter
                                }
                                
                            } else {
                                
                                let textBack = textData.backColor != nil ? ":box=1:boxcolor=\(backColor ?? "")@0.8:boxborderw=25" : ""
                                
                                if index == currentPhotoTextDataArray.count - 1 {
                                    let textFilter = "[video\(index)]drawtext=text=\"\(breakedText)\":fontfile=\(fontPath):fontsize=\(fontSizeForHeight):fontcolor=\(fontColor):x=\(x):y=\(y)\(textBack)"
                                    filterComplex += textFilter
                                } else {
                                    let textFilter = "[video\(index)]drawtext=text=\"\(breakedText)\":fontfile=\(fontPath):fontsize=\(fontSizeForHeight):fontcolor=\(fontColor):x=\(x):y=\(y)\(textBack)[video\(index + 1)];"
                                    filterComplex += textFilter
                                }
                            }
                            
                        }
                        
                        if let audioData = self.audioDataArray.first(where: { $0.photoIndex == mainIndex }) {
                            
                            let audioSTime = self.getSTimeAudio(index: mainIndex, secondsPhoto: Int(audioData.audioStartTime))
                            let audioETime = self.getETimeAudio(index: mainIndex, secondsPhoto: Int(audioData.audioEndTime))
                            let duration = audioData.audioEndTime - audioData.audioStartTime
                            
                            command = "-loop 1 \(inputs) -ss \(audioSTime) -to \(audioETime) -i \"\(audioData.audioURL.path)\" -filter_complex \"[0]scale=1080:trunc(ow/a/2)*2[video0];\(filterComplex)[final_video]\" -map \"[final_video]\":v -map \(audioIndex):a -c:v libx264 -t \(duration) -pix_fmt yuv420p -y \(outputURL.path)"
                            
                        } else {
                            command = "-loop 1 \(inputs) -filter_complex \"[0]scale=1080:trunc(ow/a/2)*2[video0];\(filterComplex)\" -c:v libx264 -t 5 -pix_fmt yuv420p -y \(outputURL.path)"
                        }
                    }

I'm not getting the exact position of the text in the generated video as the user added it. If anyone knows how to fix this, please help me.

Latest news
Related news

LEAVE A REPLY

Please enter your comment!
Please enter your name here